From f13069581271a1ce9ebfa70283a7ee6e32a02924 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 25 Jun 2019 20:04:24 +0100 Subject: [PATCH 001/140] Allow extra time for a warning to be logged (#43597) Today we assert that a warning is logged after no more than `discovery.cluster_formation_warning_timeout`, but the deterministic scheduler adds a small amount of extra randomness to the timing of future events, causing the following build to fail: ./gradlew :server:test --tests "org.elasticsearch.cluster.coordination.CoordinatorTests.testLogsWarningPeriodicallyIfClusterNotFormed" -Dtests.seed=DF35C28D4FA9EE2D This commit adds an allowance for this extra time. --- .../elasticsearch/cluster/coordination/CoordinatorTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 261abfbfac7bc..d7832f04d9589 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -1208,7 +1208,7 @@ public void assertMatched() { equalTo(cluster.clusterNodes.stream().map(ClusterNode::getLocalNode).collect(Collectors.toSet()))); } }); - cluster.runFor(warningDelayMillis, "waiting for warning to be emitted"); + cluster.runFor(warningDelayMillis + DEFAULT_DELAY_VARIABILITY, "waiting for warning to be emitted"); mockLogAppender.assertAllExpectationsMatched(); } finally { mockLogAppender.stop(); From 5f6321aacb95d2b842c74b6f7dc064c70a15e33c Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 25 Jun 2019 15:24:44 -0400 Subject: [PATCH 002/140] [DOCS] Rewrite `range` query (#43282) --- docs/reference/query-dsl/range-query.asciidoc | 268 +++++++++++------- 1 file changed, 166 insertions(+), 102 deletions(-) diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc index 61c46996949f1..27db882fe1dd3 100644 --- a/docs/reference/query-dsl/range-query.asciidoc +++ b/docs/reference/query-dsl/range-query.asciidoc @@ -1,14 +1,16 @@ [[query-dsl-range-query]] === Range Query -Matches documents with fields that have terms within a certain range. -The type of the Lucene query depends on the field type, for `string` -fields, the `TermRangeQuery`, while for number/date fields, the query is -a `NumericRangeQuery`. The following example returns all documents where -`age` is between `10` and `20`: +Returns documents that contain terms within a provided range. + +[[range-query-ex-request]] +==== Example request + +The following search returns documents where the `age` field contains a term +between `10` and `20`. [source,js] --------------------------------------------------- +---- GET _search { "query": { @@ -21,147 +23,209 @@ GET _search } } } --------------------------------------------------- +---- // CONSOLE -The `range` query accepts the following parameters: +[[range-query-top-level-params]] +==== Top-level parameters for `range` + +``:: ++ +-- +Field you wish to search. +-- + +[[range-query-field-params]] +==== Parameters for `` + +`gt`:: +Greater than. Optional. + +`gte`:: +Greater than or equal to. Optional. + +`lt`:: +Less than. Optional. + +`lte`:: +Less than or equal to. Optional. + +`format`:: ++ +-- +Date format used to convert `date` values in the query. + +By default, {es} uses the <> provided in the +``'s mapping. This value overrides that mapping format. 
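+
+For example, the following search returns documents where the `born` field
+contains a date between `01/01/2012` and `2013`. The `format` parameter
+converts these values using the `dd/MM/yyyy` and `yyyy` date formats.
+
+[source,js]
+----
+GET _search
+{
+    "query": {
+        "range" : {
+            "born" : {
+                "gte": "01/01/2012",
+                "lte": "2013",
+                "format": "dd/MM/yyyy||yyyy"
+            }
+        }
+    }
+}
+----
+// CONSOLE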
-[horizontal]
-`gte`:: Greater-than or equal to
-`gt`:: Greater-than
-`lte`:: Less-than or equal to
-`lt`:: Less-than
-`boost`:: Sets the boost value of the query, defaults to `1.0`
+For valid syntax, see <>. Optional.
+
+[WARNING]
+====
+If a `format` and `date` value are incomplete, {es} replaces any missing year,
+month, or date component with the start of
+https://en.wikipedia.org/wiki/Unix_time[Unix time], which is January 1st, 1970.
+
+For example, if the `format` value is `dd`, {es} converts a `gte` value of `10`
+to `1970-01-10T00:00:00.000Z`.
+====
+
+--
+
+[[querying-range-fields]]
+`relation`::
++
+--
+Indicates how the range query matches values for `range` fields. Optional. Valid
+values are:
+
+`INTERSECTS` (Default)::
+Matches documents with a range field value that intersects the query's range.
+
+`CONTAINS`::
+Matches documents with a range field value that entirely contains the query's range.
+
+`WITHIN`::
+Matches documents with a range field value entirely within the query's range.
+--
+
+`time_zone`::
++
+--
+https://en.wikipedia.org/wiki/List_of_UTC_time_offsets[Coordinated Universal
+Time (UTC) offset] or
+https://en.wikipedia.org/wiki/List_of_tz_database_time_zones[IANA time zone]
+used to convert `date` values in the query to UTC. Optional.
+
+Valid values are ISO 8601 UTC offsets, such as `+01:00` or `-08:00`, and IANA
+time zone IDs, such as `America/Los_Angeles`.
+
+For an example query using the `time_zone` parameter, see
+<>.
+
+[NOTE]
+====
+The `time_zone` parameter does **not** affect the <> value
+of `now`. `now` is always the current system time in UTC.
+
+However, the `time_zone` parameter does convert dates calculated using `now` and
+<>. For example, the `time_zone` parameter will
+convert a value of `now/d`.
+====
+--
+
+`boost`::
++
+--
+Floating point number used to decrease or increase the
+<> of a query. Default is `1.0`.
+Optional.
+
+You can use the `boost` parameter to adjust relevance scores for searches
+containing two or more queries.
+
+Boost values are relative to the default value of `1.0`. A boost value between
+`0` and `1.0` decreases the relevance score. A value greater than `1.0`
+increases the relevance score.
+--
+
+[[range-query-notes]]
+==== Notes
 
 [[ranges-on-dates]]
-==== Ranges on date fields
+===== Using the `range` query with `date` fields
+
+When the `` parameter is a <> field datatype, you can use
+<> with the following parameters:
 
-When running `range` queries on fields of type <>, ranges can be
-specified using <>:
+* `gt`
+* `gte`
+* `lt`
+* `lte`
+
+For example, the following search returns documents where the `timestamp` field
+contains a date between today and yesterday.
 
 [source,js]
--------------------------------------------------
+----
 GET _search
 {
     "query": {
         "range" : {
-            "date" : {
+            "timestamp" : {
                 "gte" : "now-1d/d",
                 "lt" : "now/d"
             }
         }
     }
 }
--------------------------------------------------
+----
 // CONSOLE
 
-===== Date math and rounding
-
-When using <> to round dates to the nearest day, month,
-hour, etc, the rounded dates depend on whether the ends of the ranges are
-inclusive or exclusive.
-Rounding up moves to the last millisecond of the rounding scope, and rounding
-down to the first millisecond of the rounding scope. For example:
+[[range-query-date-math-rounding]]
+====== Date math and rounding
+{es} rounds <> values in parameters as follows:
 
-[horizontal]
 `gt`::
++
+--
+Rounds up to the latest millisecond.
-
-    Greater than the date rounded up: `2014-11-18||/M` becomes
-    `2014-11-30T23:59:59.999`, ie excluding the entire month.
+For example, `2014-11-18||/M` rounds up to `2014-11-30T23:59:59.999`, excluding
+the entire month.
+--
 
 `gte`::
++
+--
+Rounds down to the first millisecond.
 
-    Greater than or equal to the date rounded down: `2014-11-18||/M` becomes
-    `2014-11-01`, ie including the entire month.
+For example, `2014-11-18||/M` rounds down to `2014-11-01`, including
+the entire month.
+--
 
 `lt`::
++
+--
+Rounds down to the first millisecond.
 
-    Less than the date rounded down: `2014-11-18||/M` becomes `2014-11-01`, ie
-    excluding the entire month.
+For example, `2014-11-18||/M` rounds down to `2014-11-01`, excluding
+the entire month.
+--
 
 `lte`::
++
+--
+Rounds up to the latest millisecond.
 
-    Less than or equal to the date rounded up: `2014-11-18||/M` becomes
-    `2014-11-30T23:59:59.999`, ie including the entire month.
+For example, `2014-11-18||/M` rounds up to `2014-11-30T23:59:59.999`, including
+the entire month.
+--
 
-===== Date format in range queries
+[[range-query-time-zone]]
+===== Example query using `time_zone` parameter
 
-Formatted dates will be parsed using the <>
-specified on the <> field by default, but it can be overridden by
-passing the `format` parameter to the `range` query:
+You can use the `time_zone` parameter to convert `date` values to UTC using a
+UTC offset. For example:
 
 [source,js]
--------------------------------------------------
-GET _search
-{
-    "query": {
-        "range" : {
-            "born" : {
-                "gte": "01/01/2012",
-                "lte": "2013",
-                "format": "dd/MM/yyyy||yyyy"
-            }
-        }
-    }
-}
--------------------------------------------------
-// CONSOLE
-
-Note that if the date misses some of the year, month and day coordinates, the
-missing parts are filled with the start of
-https://en.wikipedia.org/wiki/Unix_time[unix time], which is January 1st, 1970.
-This means, that when e.g. specifying `dd` as the format, a value like `"gte" : 10`
-will translate to `1970-01-10T00:00:00.000Z`.
-
-===== Time zone in range queries
-
-Dates can be converted from another timezone to UTC either by specifying the
-time zone in the date value itself (if the <>
-accepts it), or it can be specified as the `time_zone` parameter:
-
-[source,js]
--------------------------------------------------
+----
 GET _search
 {
     "query": {
         "range" : {
             "timestamp" : {
-                "gte": "2015-01-01 00:00:00", <1>
-                "lte": "now", <2>
-                "time_zone": "+01:00"
+                "time_zone": "+01:00", <1>
+                "gte": "2015-01-01 00:00:00", <2>
+                "lte": "now" <3>
             }
         }
     }
 }
--------------------------------------------------
+----
 // CONSOLE
-<1> This date will be converted to `2014-12-31T23:00:00 UTC`.
-<2> `now` is not affected by the `time_zone` parameter, its always the current system time (in UTC).
-However, when using <> (e.g. down to the nearest day using `now/d`),
-the provided `time_zone` will be considered.
-
-
-[[querying-range-fields]]
-==== Querying range fields
-
-`range` queries can be used on fields of type <>, allowing to
-match a range specified in the query with a range field value in the document.
-The `relation` parameter controls how these two ranges are matched:
-
-[horizontal]
-`WITHIN`::
-
-    Matches documents who's range field is entirely within the query's range.
-
-`CONTAINS`::
-
-    Matches documents who's range field entirely contains the query's range.
-
-`INTERSECTS`::
-
-    Matches documents who's range field intersects the query's range.
-    This is the default value when querying range fields.
- -For examples, see <> mapping type. +<1> Indicates that `date` values use a UTC offset of `+01:00`. +<2> With a UTC offset of `+01:00`, {es} converts this date to +`2014-12-31T23:00:00 UTC`. +<3> The `time_zone` parameter does not affect the `now` value. \ No newline at end of file From f83d8c2666710b64611ec766123af1707855bec9 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 25 Jun 2019 22:38:55 -0400 Subject: [PATCH 003/140] Ensure relocation target still tracked when start handoff (#42201) If the master removes the relocating shard, but recovery isn't aware of it, then we can enter an invalid state where ReplicationTracker does not include the local shard. --- .../index/seqno/ReplicationTracker.java | 25 +++++--- .../elasticsearch/index/shard/IndexShard.java | 7 ++- .../recovery/RecoverySourceHandler.java | 2 +- .../index/seqno/ReplicationTrackerTests.java | 4 +- .../index/shard/IndexShardTests.java | 57 ++++++++++++++----- 5 files changed, 67 insertions(+), 28 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 2d824196db1cd..436ba9c9fb1cd 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -105,12 +105,13 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L private volatile long operationPrimaryTerm; /** - * Boolean flag that indicates if a relocation handoff is in progress. A handoff is started by calling {@link #startRelocationHandoff} - * and is finished by either calling {@link #completeRelocationHandoff} or {@link #abortRelocationHandoff}, depending on whether the - * handoff was successful or not. During the handoff, which has as main objective to transfer the internal state of the global - * checkpoint tracker from the relocation source to the target, the list of in-sync shard copies cannot grow, otherwise the relocation - * target might miss this information and increase the global checkpoint to eagerly. As consequence, some of the methods in this class - * are not allowed to be called while a handoff is in progress, in particular {@link #markAllocationIdAsInSync}. + * Boolean flag that indicates if a relocation handoff is in progress. A handoff is started by calling + * {@link #startRelocationHandoff(String)} and is finished by either calling {@link #completeRelocationHandoff} or + * {@link #abortRelocationHandoff}, depending on whether the handoff was successful or not. During the handoff, which has as main + * objective to transfer the internal state of the global checkpoint tracker from the relocation source to the target, the list of + * in-sync shard copies cannot grow, otherwise the relocation target might miss this information and increase the global checkpoint + * to eagerly. As consequence, some of the methods in this class are not allowed to be called while a handoff is in progress, + * in particular {@link #markAllocationIdAsInSync}. * * A notable exception to this is the method {@link #updateFromMaster}, which is still allowed to be called during a relocation handoff. * The reason for this is that the handoff might fail and can be aborted (using {@link #abortRelocationHandoff}), in which case @@ -979,11 +980,15 @@ private synchronized void updateGlobalCheckpointOnPrimary() { /** * Initiates a relocation handoff and returns the corresponding primary context. 
*/ - public synchronized PrimaryContext startRelocationHandoff() { + public synchronized PrimaryContext startRelocationHandoff(String targetAllocationId) { assert invariant(); assert primaryMode; assert handoffInProgress == false; assert pendingInSync.isEmpty() : "relocation handoff started while there are still shard copies pending in-sync: " + pendingInSync; + if (checkpoints.containsKey(targetAllocationId) == false) { + // can happen if the relocation target was removed from cluster but the recovery process isn't aware of that. + throw new IllegalStateException("relocation target [" + targetAllocationId + "] is no longer part of the replication group"); + } handoffInProgress = true; // copy clusterStateVersion and checkpoints and return // all the entries from checkpoints that are inSync: the reason we don't need to care about initializing non-insync entries @@ -1037,6 +1042,12 @@ public synchronized void completeRelocationHandoff() { public synchronized void activateWithPrimaryContext(PrimaryContext primaryContext) { assert invariant(); assert primaryMode == false; + // TODO: remove this check after backporting to 7.x + if (primaryContext.checkpoints.containsKey(shardAllocationId) == false) { + // can happen if the old primary was on an old version + assert indexSettings.getIndexVersionCreated().before(Version.V_8_0_0); + throw new IllegalStateException("primary context [" + primaryContext + "] does not contain " + shardAllocationId); + } final Runnable runAfter = getMasterUpdateOperationFromCurrentState(); primaryMode = true; // capture current state to possibly replay missed cluster state update diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 4644a966516b8..7b4e06a451c7d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -619,10 +619,11 @@ public IndexShardState markAsRecovering(String reason, RecoveryState recoverySta * * @param consumer a {@link Runnable} that is executed after operations are blocked * @throws IllegalIndexShardStateException if the shard is not relocating due to concurrent cancellation + * @throws IllegalStateException if the relocation target is no longer part of the replication group * @throws InterruptedException if blocking operations is interrupted */ - public void relocated(final Consumer consumer) - throws IllegalIndexShardStateException, InterruptedException { + public void relocated(final String targetAllocationId, final Consumer consumer) + throws IllegalIndexShardStateException, IllegalStateException, InterruptedException { assert shardRouting.primary() : "only primaries can be marked as relocated: " + shardRouting; final Releasable forceRefreshes = refreshListeners.forceRefreshes(); try { @@ -636,7 +637,7 @@ public void relocated(final Consumer consumer * network operation. Doing this under the mutex can implicitly block the cluster state update thread on network operations. 
*/ verifyRelocatingState(); - final ReplicationTracker.PrimaryContext primaryContext = replicationTracker.startRelocationHandoff(); + final ReplicationTracker.PrimaryContext primaryContext = replicationTracker.startRelocationHandoff(targetAllocationId); try { consumer.accept(primaryContext); synchronized (mutex) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index fdada82c5bc56..8017362d40b80 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -652,7 +652,7 @@ void finalizeRecovery(final long targetLocalCheckpoint, final ActionListener shard.relocated(recoveryTarget::handoffPrimaryContext)); + cancellableThreads.execute(() -> shard.relocated(request.targetAllocationId(), recoveryTarget::handoffPrimaryContext)); /* * if the recovery process fails after disabling primary mode on the source shard, both relocation source and * target are failed (see {@link IndexShard#updateRoutingEntry}). diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index 05ca0a5ea3006..70b728a4a2022 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -716,7 +716,7 @@ public void testPrimaryContextHandoff() throws IOException { newPrimary.shardAllocationId, Math.max(SequenceNumbers.NO_OPS_PERFORMED, oldPrimary.getGlobalCheckpoint() + randomInt(5))); oldPrimary.updateGlobalCheckpointForShard(newPrimary.shardAllocationId, oldPrimary.getGlobalCheckpoint()); - ReplicationTracker.PrimaryContext primaryContext = oldPrimary.startRelocationHandoff(); + ReplicationTracker.PrimaryContext primaryContext = oldPrimary.startRelocationHandoff(newPrimary.shardAllocationId); if (randomBoolean()) { // cluster state update after primary context handoff @@ -742,7 +742,7 @@ public void testPrimaryContextHandoff() throws IOException { } // do another handoff - primaryContext = oldPrimary.startRelocationHandoff(); + primaryContext = oldPrimary.startRelocationHandoff(newPrimary.shardAllocationId); } // send primary context through the wire diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 7e284aa4f73e9..f07b8c977c7fb 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -876,7 +876,7 @@ public void testOperationPermitOnReplicaShards() throws Exception { routing = newShardRouting(routing.shardId(), routing.currentNodeId(), "otherNode", true, ShardRoutingState.RELOCATING, AllocationId.newRelocation(routing.allocationId())); IndexShardTestCase.updateRoutingEntry(indexShard, routing); - indexShard.relocated(primaryContext -> {}); + indexShard.relocated(routing.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {}); engineClosed = false; break; } @@ -1739,12 +1739,13 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { public void testLockingBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); - 
IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); + final ShardRouting routing = ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, routing); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { latch.countDown(); try { - shard.relocated(primaryContext -> {}); + shard.relocated(routing.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {}); } catch (InterruptedException e) { throw new RuntimeException(e); } @@ -1769,13 +1770,14 @@ public void testLockingBeforeAndAfterRelocated() throws Exception { public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); - IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); + final ShardRouting routing = ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, routing); final CountDownLatch startRecovery = new CountDownLatch(1); final CountDownLatch relocationStarted = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { try { startRecovery.await(); - shard.relocated(primaryContext -> relocationStarted.countDown()); + shard.relocated(routing.getTargetRelocatingShard().allocationId().getId(), primaryContext -> relocationStarted.countDown()); } catch (InterruptedException e) { throw new RuntimeException(e); } @@ -1840,7 +1842,8 @@ public void onFailure(Exception e) { public void testStressRelocated() throws Exception { final IndexShard shard = newStartedShard(true); assertFalse(shard.isRelocatedPrimary()); - IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); + final ShardRouting routing = ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, routing); final int numThreads = randomIntBetween(2, 4); Thread[] indexThreads = new Thread[numThreads]; CountDownLatch allPrimaryOperationLocksAcquired = new CountDownLatch(numThreads); @@ -1862,7 +1865,7 @@ public void run() { AtomicBoolean relocated = new AtomicBoolean(); final Thread recoveryThread = new Thread(() -> { try { - shard.relocated(primaryContext -> {}); + shard.relocated(routing.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {}); } catch (InterruptedException e) { throw new RuntimeException(e); } @@ -1895,8 +1898,9 @@ public void run() { public void testRelocatedShardCanNotBeRevived() throws IOException, InterruptedException { final IndexShard shard = newStartedShard(true); final ShardRouting originalRouting = shard.routingEntry(); - IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); - shard.relocated(primaryContext -> {}); + final ShardRouting routing = ShardRoutingHelper.relocate(originalRouting, "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, routing); + shard.relocated(routing.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {}); expectThrows(IllegalIndexShardStateException.class, () -> IndexShardTestCase.updateRoutingEntry(shard, originalRouting)); closeShards(shard); } @@ -1904,16 +1908,19 @@ public void testRelocatedShardCanNotBeRevived() throws IOException, InterruptedE public void testShardCanNotBeMarkedAsRelocatedIfRelocationCancelled() throws IOException { final IndexShard shard = newStartedShard(true); 
final ShardRouting originalRouting = shard.routingEntry(); - IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); + final ShardRouting relocationRouting = ShardRoutingHelper.relocate(originalRouting, "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, relocationRouting); IndexShardTestCase.updateRoutingEntry(shard, originalRouting); - expectThrows(IllegalIndexShardStateException.class, () -> shard.relocated(primaryContext -> {})); + expectThrows(IllegalIndexShardStateException.class, + () -> shard.relocated(relocationRouting.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {})); closeShards(shard); } public void testRelocatedShardCanNotBeRevivedConcurrently() throws IOException, InterruptedException, BrokenBarrierException { final IndexShard shard = newStartedShard(true); final ShardRouting originalRouting = shard.routingEntry(); - IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); + final ShardRouting relocationRouting = ShardRoutingHelper.relocate(originalRouting, "other_node"); + IndexShardTestCase.updateRoutingEntry(shard, relocationRouting); CyclicBarrier cyclicBarrier = new CyclicBarrier(3); AtomicReference relocationException = new AtomicReference<>(); Thread relocationThread = new Thread(new AbstractRunnable() { @@ -1925,7 +1932,7 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { cyclicBarrier.await(); - shard.relocated(primaryContext -> {}); + shard.relocated(relocationRouting.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {}); } }); relocationThread.start(); @@ -1953,7 +1960,8 @@ protected void doRun() throws Exception { assertThat(relocationException.get(), nullValue()); } else { logger.debug("shard relocation was cancelled"); - assertThat(relocationException.get(), instanceOf(IllegalIndexShardStateException.class)); + assertThat(relocationException.get(), + either(instanceOf(IllegalIndexShardStateException.class)).or(instanceOf(IllegalStateException.class))); assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(false)); assertThat(cancellingException.get(), nullValue()); @@ -1961,6 +1969,25 @@ protected void doRun() throws Exception { closeShards(shard); } + public void testRelocateMissingTarget() throws Exception { + final IndexShard shard = newStartedShard(true); + final ShardRouting original = shard.routingEntry(); + final ShardRouting toNode1 = ShardRoutingHelper.relocate(original, "node_1"); + IndexShardTestCase.updateRoutingEntry(shard, toNode1); + IndexShardTestCase.updateRoutingEntry(shard, original); + final ShardRouting toNode2 = ShardRoutingHelper.relocate(original, "node_2"); + IndexShardTestCase.updateRoutingEntry(shard, toNode2); + final AtomicBoolean relocated = new AtomicBoolean(); + final IllegalStateException error = expectThrows(IllegalStateException.class, + () -> shard.relocated(toNode1.getTargetRelocatingShard().allocationId().getId(), ctx -> relocated.set(true))); + assertThat(error.getMessage(), equalTo("relocation target [" + toNode1.getTargetRelocatingShard().allocationId().getId() + + "] is no longer part of the replication group")); + assertFalse(relocated.get()); + shard.relocated(toNode2.getTargetRelocatingShard().allocationId().getId(), ctx -> relocated.set(true)); + assertTrue(relocated.get()); + closeShards(shard); + } + public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { /* * The 
flow of this test:
@@ -2266,7 +2293,7 @@ public void testRecoveryFailsAfterMovingToRelocatedState() throws InterruptedExc
         assertThat(shard.state(), equalTo(IndexShardState.STARTED));
         ShardRouting inRecoveryRouting = ShardRoutingHelper.relocate(origRouting, "some_node");
         IndexShardTestCase.updateRoutingEntry(shard, inRecoveryRouting);
-        shard.relocated(primaryContext -> {});
+        shard.relocated(inRecoveryRouting.getTargetRelocatingShard().allocationId().getId(), primaryContext -> {});
         assertTrue(shard.isRelocatedPrimary());
         try {
             IndexShardTestCase.updateRoutingEntry(shard, origRouting);

From 1a7730160f0a2401c6d90978f0154dfe26b79a05 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Tue, 25 Jun 2019 22:45:27 -0400
Subject: [PATCH 004/140] Adjust bwc assertion after backporting #42201

---
 .../org/elasticsearch/index/seqno/ReplicationTracker.java | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java
index 436ba9c9fb1cd..20e998b1e12a6 100644
--- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java
+++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java
@@ -1042,12 +1042,6 @@ public synchronized void completeRelocationHandoff() {
     public synchronized void activateWithPrimaryContext(PrimaryContext primaryContext) {
         assert invariant();
         assert primaryMode == false;
-        // TODO: remove this check after backporting to 7.x
-        if (primaryContext.checkpoints.containsKey(shardAllocationId) == false) {
-            // can happen if the old primary was on an old version
-            assert indexSettings.getIndexVersionCreated().before(Version.V_8_0_0);
-            throw new IllegalStateException("primary context [" + primaryContext + "] does not contain " + shardAllocationId);
-        }
         final Runnable runAfter = getMasterUpdateOperationFromCurrentState();
         primaryMode = true;
         // capture current state to possibly replay missed cluster state update

From d579f892dbd8e70acc27ecb95b9035a97352d8d8 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 25 Jun 2019 20:45:38 -0700
Subject: [PATCH 005/140] Remove guice modules from plugins (#43555)

Guice modules provide a way to supply injected parameters to other
injected constructors. The last remaining use in plugins is to provide
constructor arguments to transport actions. This commit removes the
ability for plugins to provide guice modules. Any transport action
parameters should be concretely typed and returned from
createComponents, which are still injected. While this does not remove
guice completely, it removes it from all remaining plugin apis.
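
To illustrate the migration, here is a minimal sketch of a plugin moving a
component from a guice module to createComponents. ExamplePlugin and
ExampleService are hypothetical names, not classes touched by this commit;
the createComponents signature is the one used throughout this change:

    import java.util.Collection;
    import java.util.Collections;

    import org.elasticsearch.client.Client;
    import org.elasticsearch.cluster.service.ClusterService;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.env.Environment;
    import org.elasticsearch.env.NodeEnvironment;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.watcher.ResourceWatcherService;

    public class ExamplePlugin extends Plugin {
        private final ExampleService exampleService = new ExampleService(); // hypothetical component

        // Before (API removed by this commit): the component was exposed
        // through a guice module so transport actions could inject it.
        //
        //     @Override
        //     public Collection<Module> createGuiceModules() {
        //         return Collections.singleton(
        //             b -> b.bind(ExampleService.class).toInstance(exampleService));
        //     }

        // After: return the concretely typed component from createComponents.
        // Components returned here are still bound in guice, so a transport
        // action constructor can still receive an ExampleService via @Inject.
        @Override
        public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                                   ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                                   NamedXContentRegistry xContentRegistry, Environment environment,
                                                   NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
            return Collections.singletonList(exampleService);
        }
    }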
--- .../painless/PainlessPlugin.java | 19 +++++++-- .../java/org/elasticsearch/node/Node.java | 8 ---- .../org/elasticsearch/plugins/Plugin.java | 16 -------- .../elasticsearch/plugins/PluginsService.java | 19 --------- .../index/SettingsListenerIT.java | 17 ++++++-- .../test/MockIndexEventListener.java | 17 ++++++-- .../xpack/core/EmptyXPackFeatureSet.java | 24 ----------- .../elasticsearch/xpack/core/XPackPlugin.java | 10 ----- .../core/LocalStateCompositeXPackPlugin.java | 11 ----- .../xpack/monitoring/Monitoring.java | 19 ++------- .../monitoring/MonitoringUsageServices.java | 23 +++++++++++ .../MonitoringUsageTransportAction.java | 12 +++--- .../MonitoringInfoTransportActionTests.java | 3 +- .../xpack/security/Security.java | 40 +++---------------- .../xpack/security/SecurityUsageServices.java | 30 ++++++++++++++ .../SecurityUsageTransportAction.java | 12 +++--- .../SecurityInfoTransportActionTests.java | 4 +- .../xpack/watcher/ClockHolder.java | 23 +++++++++++ .../elasticsearch/xpack/watcher/Watcher.java | 18 +-------- .../actions/ack/TransportAckWatchAction.java | 5 ++- .../TransportActivateWatchAction.java | 5 ++- .../execute/TransportExecuteWatchAction.java | 5 ++- .../actions/get/TransportGetWatchAction.java | 5 ++- .../actions/put/TransportPutWatchAction.java | 5 ++- .../xpack/watcher/WatcherPluginTests.java | 1 - .../AbstractWatcherIntegrationTestCase.java | 8 ++-- .../ack/TransportAckWatchActionTests.java | 4 +- .../put/TransportPutWatchActionTests.java | 3 +- 28 files changed, 170 insertions(+), 196 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/EmptyXPackFeatureSet.java create mode 100644 x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageServices.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageServices.java create mode 100644 x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/ClockHolder.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 96a03fe5914bd..92cd4f575feec 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -23,14 +23,19 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.painless.action.PainlessContextAction; import org.elasticsearch.painless.action.PainlessExecuteAction; import org.elasticsearch.painless.spi.PainlessExtension; @@ -45,7 +50,10 @@ import 
org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.pipeline.MovingFunctionScript; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import java.util.ArrayList; import java.util.Arrays; @@ -103,8 +111,13 @@ public ScriptEngine getScriptEngine(Settings settings, Collection createGuiceModules() { - return Collections.singleton(b -> b.bind(PainlessScriptEngine.class).toInstance(painlessScriptEngine.get())); + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + // this is a hack to bind the painless script engine in guice (all components are added to guice), so that + // the painless context api. this is a temporary measure until transport actions do no require guice + return Collections.singletonList(painlessScriptEngine.get()); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index e959904b8fee7..3fab82c3e9eb9 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -65,7 +65,6 @@ import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Key; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasables; @@ -156,7 +155,6 @@ import org.elasticsearch.watcher.ResourceWatcherService; import javax.net.ssl.SNIHostName; - import java.io.BufferedWriter; import java.io.Closeable; import java.io.IOException; @@ -374,10 +372,6 @@ protected Node( final UsageService usageService = new UsageService(); ModulesBuilder modules = new ModulesBuilder(); - // plugin modules must be added here, before others or we can get crazy injection errors... 
- for (Module pluginModule : pluginsService.createGuiceModules()) { - modules.add(pluginModule); - } final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool, clusterInfoService); ClusterModule clusterModule = new ClusterModule(settings, clusterService, clusterPlugins, clusterInfoService); modules.add(clusterModule); @@ -595,8 +589,6 @@ protected Node( List pluginLifecycleComponents = pluginComponents.stream() .filter(p -> p instanceof LifecycleComponent) .map(p -> (LifecycleComponent) p).collect(Collectors.toList()); - pluginLifecycleComponents.addAll(pluginsService.getGuiceServiceClasses().stream() - .map(injector::getInstance).collect(Collectors.toList())); resourcesToClose.addAll(pluginLifecycleComponents); this.pluginLifecycleComponents = Collections.unmodifiableList(pluginLifecycleComponents); client.initialize(injector.getInstance(new Key>() {}), diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 08a359bab39fc..5bc8e9267a515 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; @@ -84,21 +83,6 @@ protected Optional getFeature() { return Optional.empty(); } - /** - * Node level guice modules. - */ - public Collection createGuiceModules() { - return Collections.emptyList(); - } - - /** - * Node level services that will be automatically started/stopped/closed. This classes must be constructed - * by injection with guice. - */ - public Collection> getGuiceServiceClasses() { - return Collections.emptyList(); - } - /** * Returns components added by this plugin. * diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index c6951f8709130..feb18c61e3365 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -33,8 +33,6 @@ import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -237,14 +235,6 @@ public Settings updatedSettings() { return builder.put(this.settings).build(); } - public Collection createGuiceModules() { - List modules = new ArrayList<>(); - for (Tuple plugin : plugins) { - modules.addAll(plugin.v2().createGuiceModules()); - } - return modules; - } - public List> getExecutorBuilders(Settings settings) { final ArrayList> builders = new ArrayList<>(); for (final Tuple plugin : plugins) { @@ -253,15 +243,6 @@ public List> getExecutorBuilders(Settings settings) { return builders; } - /** Returns all classes injected into guice by plugins which extend {@link LifecycleComponent}. 
*/ - public Collection> getGuiceServiceClasses() { - List> services = new ArrayList<>(); - for (Tuple plugin : plugins) { - services.addAll(plugin.v2().getGuiceServiceClasses()); - } - return services; - } - public void onIndexModule(IndexModule indexModule) { for (Tuple plugin : plugins) { plugin.v2().onIndexModule(indexModule); diff --git a/server/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/server/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index 8b603420dfd05..21f15cc46907c 100644 --- a/server/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/server/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -18,14 +18,22 @@ */ package org.elasticsearch.index; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import java.util.Arrays; import java.util.Collection; @@ -60,8 +68,11 @@ public void onIndexModule(IndexModule module) { } @Override - public Collection createGuiceModules() { - return Collections.singletonList(new SettingsListenerModule(service)); + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + return Collections.singletonList(service); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index 58b722cf15274..77d1c315b6970 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -18,12 +18,17 @@ */ package org.elasticsearch.test; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; @@ -34,6 +39,9 @@ import org.elasticsearch.index.shard.ShardId; import 
org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import java.util.Arrays; import java.util.Collection; @@ -72,8 +80,11 @@ public void onIndexModule(IndexModule module) { } @Override - public Collection createGuiceModules() { - return Collections.singleton(binder -> binder.bind(TestEventListener.class).toInstance(listener)); + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + return Collections.singletonList(listener); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/EmptyXPackFeatureSet.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/EmptyXPackFeatureSet.java deleted file mode 100644 index 5f8f2ce838585..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/EmptyXPackFeatureSet.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core; - -public class EmptyXPackFeatureSet implements XPackFeatureSet { - @Override - public String name() { - return "Empty XPackFeatureSet"; - } - - @Override - public boolean available() { - return false; - } - - @Override - public boolean enabled() { - return false; - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 9632577b4fb47..50cfcd5abaff7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.inject.Binder; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationLogger; @@ -221,15 +220,6 @@ public Settings additionalSettings() { return Settings.builder().put(super.additionalSettings()).put(xpackInstalledNodeAttrSetting, "true").build(); } - @Override - public Collection createGuiceModules() { - ArrayList modules = new ArrayList<>(); - //modules.add(b -> b.bind(Clock.class).toInstance(getClock())); - // used to get core up and running, we do not bind the actual feature set here - modules.add(b -> XPackPlugin.createFeatureSetMultiBinder(b, EmptyXPackFeatureSet.class)); - return modules; - } - @Override public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 350840883a7fe..e334f72bc4a8a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; @@ -136,16 +135,6 @@ protected void setLicenseState(XPackLicenseState licenseState) { this.licenseState = licenseState; } - @Override - public Collection createGuiceModules() { - ArrayList modules = new ArrayList<>(); - modules.addAll(super.createGuiceModules()); - filterPlugins(Plugin.class).stream().forEach(p -> - modules.addAll(p.createGuiceModules()) - ); - return modules; - } - @Override public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 5fac72d5edad9..29a3caec10bcb 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -11,8 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -95,25 +93,13 @@ boolean isEnabled() { return enabled; } - @Override - public Collection createGuiceModules() { - List modules = new ArrayList<>(); - modules.add(b -> { - if (enabled == false) { - b.bind(MonitoringService.class).toProvider(Providers.of(null)); - b.bind(Exporters.class).toProvider(Providers.of(null)); - } - }); - return modules; - } - @Override public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { if (enabled == false) { - return Collections.emptyList(); + return Collections.singletonList(new MonitoringUsageServices(null, null)); } final ClusterSettings clusterSettings = clusterService.getClusterSettings(); @@ -137,7 +123,8 @@ public Collection createComponents(Client client, ClusterService cluster final MonitoringService monitoringService = new MonitoringService(settings, clusterService, threadPool, collectors, exporters); - return Arrays.asList(monitoringService, exporters, cleanerService); + var usageServices = new MonitoringUsageServices(monitoringService, exporters); + return 
Arrays.asList(monitoringService, exporters, cleanerService, usageServices); } @Override diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageServices.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageServices.java new file mode 100644 index 0000000000000..59555e073730d --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageServices.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring; + +import org.elasticsearch.xpack.monitoring.exporter.Exporters; + +/** + * A wrapper around the services needed to produce usage information for the monitoring feature. + * + * This class is temporary until actions can be constructed directly by plugins. + */ +class MonitoringUsageServices { + final MonitoringService monitoringService; + final Exporters exporters; + + MonitoringUsageServices(MonitoringService monitoringService, Exporters exporters) { + this.monitoringService = monitoringService; + this.exporters = exporters; + } +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java index 6666a8f0c4158..27fe91a08bbd3 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; @@ -30,26 +29,25 @@ public class MonitoringUsageTransportAction extends XPackUsageFeatureTransportAction { private final boolean enabled; - private final MonitoringService monitoring; + private final MonitoringService monitoringService; private final XPackLicenseState licenseState; private final Exporters exporters; @Inject public MonitoringUsageTransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Settings settings, XPackLicenseState licenseState, @Nullable MonitoringService monitoring, - @Nullable Exporters exporters) { + Settings settings, XPackLicenseState licenseState, MonitoringUsageServices monitoringServices) { super(XPackUsageFeatureAction.MONITORING.name(), transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver); this.enabled = XPackSettings.MONITORING_ENABLED.get(settings); this.licenseState = licenseState; - this.monitoring = monitoring; - this.exporters = exporters; + this.monitoringService = monitoringServices.monitoringService; + this.exporters = monitoringServices.exporters; } @Override protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { - final boolean 
collectionEnabled = monitoring != null && monitoring.isMonitoringActive(); + final boolean collectionEnabled = monitoringService != null && monitoringService.isMonitoringActive(); var usage = new MonitoringFeatureSetUsage(licenseState.isMonitoringAllowed(), enabled, collectionEnabled, exportersUsage(exporters)); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java index 6dc53b3d67a73..bc7e9ffca5cc9 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java @@ -103,7 +103,8 @@ public void testUsage() throws Exception { when(monitoring.isMonitoringActive()).thenReturn(collectionEnabled); var usageAction = new MonitoringUsageTransportAction(mock(TransportService.class), null, null, - mock(ActionFilters.class), null, Settings.EMPTY,licenseState, monitoring, exporters); + mock(ActionFilters.class), null, Settings.EMPTY,licenseState, + new MonitoringUsageServices(monitoring, exporters)); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, future); MonitoringFeatureSetUsage monitoringUsage = (MonitoringFeatureSetUsage) future.get().getUsage(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3af8a7e36d1e8..56797baff6889 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -24,8 +24,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; @@ -324,35 +322,6 @@ private static void runStartupChecks(Settings settings) { validateRealmSettings(settings); } - @Override - public Collection createGuiceModules() { - List modules = new ArrayList<>(); - if (enabled == false) { - modules.add(b -> b.bind(IPFilter.class).toProvider(Providers.of(null))); - } - - if (enabled == false) { - modules.add(b -> { - b.bind(Realms.class).toProvider(Providers.of(null)); // for SecurityInfoTransportAction - b.bind(CompositeRolesStore.class).toProvider(Providers.of(null)); // for SecurityInfoTransportAction - b.bind(NativeRoleMappingStore.class).toProvider(Providers.of(null)); // for SecurityInfoTransportAction - b.bind(AuditTrailService.class) - .toInstance(new AuditTrailService(Collections.emptyList(), getLicenseState())); - }); - return modules; - } - - // we can't load that at construction time since the license plugin might not have been loaded at that point - // which might not be the case during Plugin class instantiation. 
Once nodeModules are pulled - // everything should have been loaded - modules.add(b -> { - if (XPackSettings.AUDIT_ENABLED.get(settings)) { - b.bind(AuditTrail.class).to(AuditTrailService.class); // interface used by some actions... - } - }); - return modules; - } - // overridable by tests protected Clock getClock() { return Clock.systemUTC(); @@ -376,7 +345,7 @@ public Collection createComponents(Client client, ClusterService cluster Collection createComponents(Client client, ThreadPool threadPool, ClusterService clusterService, ResourceWatcherService resourceWatcherService, ScriptService scriptService) throws Exception { if (enabled == false) { - return Collections.emptyList(); + return Collections.singletonList(new SecurityUsageServices(null, null, null, null)); } threadContext.set(threadPool.getThreadContext()); @@ -485,6 +454,8 @@ auditTrailService, failureHandler, threadPool, anonymousUser, getAuthorizationEn securityActionFilter.set(new SecurityActionFilter(authcService.get(), authzService, getLicenseState(), threadPool, securityContext.get(), destructiveOperations)); + components.add(new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get())); + return components; } @@ -761,8 +732,9 @@ public List getActionFilters() { @Override public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { + IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster) { if (enabled == false) { return emptyList(); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageServices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageServices.java new file mode 100644 index 0000000000000..e85459c484b34 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageServices.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.xpack.security.authc.Realms; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.transport.filter.IPFilter; + +/** + * A wrapper around the services needed to produce usage information for the security feature. + * + * This class is temporary until actions can be constructed directly by plugins. 
+ */ +class SecurityUsageServices { + final Realms realms; + final CompositeRolesStore rolesStore; + final NativeRoleMappingStore roleMappingStore; + final IPFilter ipFilter; + + SecurityUsageServices(Realms realms, CompositeRolesStore rolesStore, NativeRoleMappingStore roleMappingStore, IPFilter ipFilter) { + this.realms = realms; + this.rolesStore = rolesStore; + this.roleMappingStore = roleMappingStore; + this.ipFilter = ipFilter; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java index 3281373580747..e9d23567e062a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java @@ -55,18 +55,16 @@ public class SecurityUsageTransportAction extends XPackUsageFeatureTransportActi @Inject public SecurityUsageTransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Settings settings, XPackLicenseState licenseState, @Nullable Realms realms, - @Nullable CompositeRolesStore rolesStore, @Nullable NativeRoleMappingStore roleMappingStore, - @Nullable IPFilter ipFilter) { + Settings settings, XPackLicenseState licenseState, SecurityUsageServices securityServices) { super(XPackUsageFeatureAction.SECURITY.name(), transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver); this.enabledInSettings = XPackSettings.SECURITY_ENABLED.get(settings); this.settings = settings; this.licenseState = licenseState; - this.realms = realms; - this.rolesStore = rolesStore; - this.roleMappingStore = roleMappingStore; - this.ipFilter = ipFilter; + this.realms = securityServices.realms; + this.rolesStore = securityServices.rolesStore; + this.roleMappingStore = securityServices.roleMappingStore; + this.ipFilter = securityServices.ipFilter; } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java index 21517144d0457..fe89488de86a9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java @@ -55,6 +55,7 @@ public class SecurityInfoTransportActionTests extends ESTestCase { private IPFilter ipFilter; private CompositeRolesStore rolesStore; private NativeRoleMappingStore roleMappingStore; + private SecurityUsageServices securityServices; @Before public void init() throws Exception { @@ -64,6 +65,7 @@ public void init() throws Exception { ipFilter = mock(IPFilter.class); rolesStore = mock(CompositeRolesStore.class); roleMappingStore = mock(NativeRoleMappingStore.class); + securityServices = new SecurityUsageServices(realms, rolesStore, roleMappingStore, ipFilter); } public void testAvailable() { @@ -327,6 +329,6 @@ private void configureRoleMappingStoreUsage(boolean roleMappingStoreEnabled) { private SecurityUsageTransportAction newUsageAction(Settings settings) { return new SecurityUsageTransportAction(mock(TransportService.class),null, null, 
mock(ActionFilters.class),null, - settings, licenseState, realms, rolesStore, roleMappingStore, ipFilter); + settings, licenseState, securityServices); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/ClockHolder.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/ClockHolder.java new file mode 100644 index 0000000000000..f94c90e8fd0eb --- /dev/null +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/ClockHolder.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher; + +import java.time.Clock; + +/** + * A wrapper around {@link java.time.Clock} to provide a concrete type for Guice injection. + * + * This class is temporary until {@link java.time.Clock} can be passed to action constructors + * directly, or the actions can be rewritten to be unit tested with the clock overridden + * just for unit tests instead of via Node construction. + */ +public final class ClockHolder { + public final Clock clock; + + public ClockHolder(Clock clock) { + this.clock = clock; + } +} diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 64799a4c57d65..1b8fcc2658f79 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -21,8 +21,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.regex.Regex; @@ -410,7 +408,8 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) listener = new WatcherIndexingListener(watchParser, getClock(), triggerService); clusterService.addListener(listener); - return Arrays.asList(registry, inputRegistry, historyStore, triggerService, triggeredWatchParser, + // note: clock is needed here until actions can be constructed directly instead of by Guice + return Arrays.asList(new ClockHolder(getClock()), registry, inputRegistry, historyStore, triggerService, triggeredWatchParser, watcherLifeCycleService, executionService, triggerEngineListener, watcherService, watchParser, configuredTriggerEngine, triggeredWatchStore, watcherSearchTemplateService, slackService, pagerDutyService); } @@ -427,19 +426,6 @@ protected Consumer> getTriggerEngineListener(ExecutionSer return new AsyncTriggerEventConsumer(executionService); } - @Override - public Collection createGuiceModules() { - List modules = new ArrayList<>(); - modules.add(b -> b.bind(Clock.class).toInstance(getClock())); //currently assuming the only place clock is bound - modules.add(b -> { - if (enabled == false) { - b.bind(WatcherService.class).toProvider(Providers.of(null)); - } - }); - - return modules; - } - @Override public List> getSettings() { List> settings = new ArrayList<>(); diff --git
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 36643ed18f8d8..ab26c5df6303d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsRequest; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.core.watcher.watch.WatchField; +import org.elasticsearch.xpack.watcher.ClockHolder; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.WatchParser; @@ -54,10 +55,10 @@ public class TransportAckWatchAction extends WatcherTransportAction> executorBuilders = watcher.getExecutorBuilders(settings); assertThat(executorBuilders, hasSize(0)); - assertThat(watcher.createGuiceModules(), hasSize(2)); assertThat(watcher.getActions(), hasSize(2)); assertThat(watcher.getRestHandlers(settings, null, null, null, null, null, null), hasSize(0)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 288088fa3e88e..7ad5b5e98bf4a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -54,6 +54,7 @@ import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.indexlifecycle.IndexLifecycle; +import org.elasticsearch.xpack.watcher.ClockHolder; import org.elasticsearch.xpack.watcher.notification.email.Authentication; import org.elasticsearch.xpack.watcher.notification.email.Email; import org.elasticsearch.xpack.watcher.notification.email.EmailService; @@ -64,7 +65,6 @@ import org.junit.After; import org.junit.Before; -import java.time.Clock; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; @@ -170,7 +170,7 @@ protected boolean timeWarped() { public void _setup() throws Exception { if (timeWarped()) { timeWarp = new TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class), - (ClockMock)getInstanceFromMaster(Clock.class)); + (ClockMock)getInstanceFromMaster(ClockHolder.class).clock); } if (internalCluster().size() > 0) { @@ -619,13 +619,13 @@ public void removeAndEnsureHealthy(InternalTestCluster cluster) { @Override public synchronized void applyToNode(String node, InternalTestCluster cluster) { if (frozen) { - ((ClockMock)cluster.getInstance(Clock.class, node)).freeze(); + ((ClockMock)cluster.getInstance(ClockHolder.class, node).clock).freeze(); } } @Override public void removeFromNode(String node, InternalTestCluster cluster) { - ((ClockMock)cluster.getInstance(Clock.class, node)).unfreeze(); + ((ClockMock)cluster.getInstance(ClockHolder.class, node).clock).unfreeze(); } @Override diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java index 0f7d64527fe26..bfaed67f6de23 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsAction; import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.ClockHolder; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.junit.Before; @@ -63,7 +64,8 @@ public void setupAction() { client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); action = new TransportAckWatchAction(transportService, new ActionFilters(Collections.emptySet()), - Clock.systemUTC(), new XPackLicenseState(Settings.EMPTY), watchParser, client, createClusterService(threadPool)); + new ClockHolder(Clock.systemUTC()), new XPackLicenseState(Settings.EMPTY), + watchParser, client, createClusterService(threadPool)); } public void testWatchNotFound() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java index 411bee5e2f9d9..0ee82d86c76a9 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.ClockHolder; import org.elasticsearch.xpack.watcher.test.WatchExecutionContextMockBuilder; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.junit.Before; @@ -75,7 +76,7 @@ public void setupAction() throws Exception { }).when(client).execute(any(), any(), any()); action = new TransportPutWatchAction(transportService, threadPool, new ActionFilters(Collections.emptySet()), - new ClockMock(), new XPackLicenseState(Settings.EMPTY), parser, client); + new ClockHolder(new ClockMock()), new XPackLicenseState(Settings.EMPTY), parser, client); } public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { From 017a23156318f92182d357c47e1b87318bec6078 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 25 Jun 2019 20:58:19 -0700 Subject: [PATCH 006/140] Remove unused method not caught by merge relates #43555 --- .../cluster/coordination/VotingOnlyNodePlugin.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java index 2f16179ae227c..3142db91d3855 100644 --- 
a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java +++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Setting; @@ -41,12 +40,10 @@ import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -116,13 +113,6 @@ public Collection createComponents(Client client, ClusterService cluster new ActionHandler<>(XPackInfoFeatureAction.VOTING_ONLY, VotingOnlyNodeFeatureSet.UsageInfoAction.class)); } - @Override - public Collection createGuiceModules() { - List modules = new ArrayList<>(); - modules.add(b -> XPackPlugin.bindFeatureSet(b, VotingOnlyNodeFeatureSet.class)); - return modules; - } - @Override public Map getElectionStrategies() { return Collections.singletonMap(VOTING_ONLY_ELECTION_STRATEGY, new VotingOnlyNodeElectionStrategy()); From 3ed3041221e3e725eaab3751a88abfcf5eb2e8b8 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Wed, 26 Jun 2019 09:27:58 +0200 Subject: [PATCH 007/140] Reindex remote version lookup test refactor (#43576) Refactor test to reuse code and ease maintenance --- .../RemoteScrollableHitSourceTests.java | 49 ++++++------------- 1 file changed, 14 insertions(+), 35 deletions(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 844c6b8351993..0ab100a856fc1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BackoffPolicy; -import org.elasticsearch.index.reindex.ScrollableHitSource.Response; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.HeapBufferedAsyncResponseConsumer; import org.elasticsearch.client.RestClient; @@ -53,6 +52,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.reindex.ScrollableHitSource.Response; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; @@ -123,41 +123,20 @@ public void tearDown() throws Exception { } public void testLookupRemoteVersion() throws Exception { 
+ assertLookupRemoteVersion(Version.fromString("0.20.5"), "main/0_20_5.json"); + assertLookupRemoteVersion(Version.fromString("0.90.13"), "main/0_90_13.json"); + assertLookupRemoteVersion(Version.fromString("1.7.5"), "main/1_7_5.json"); + assertLookupRemoteVersion(Version.fromId(2030399), "main/2_3_3.json"); + // assert for V_5_0_0 (no qualifier) since we no longer consider qualifier in Version since 7 + assertLookupRemoteVersion(Version.fromId(5000099), "main/5_0_0_alpha_3.json"); + // V_5_0_0 since we no longer consider qualifier in Version + assertLookupRemoteVersion(Version.fromId(5000099), "main/with_unknown_fields.json"); + } + + private void assertLookupRemoteVersion(Version expected, String s) throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/0_20_5.json").lookupRemoteVersion(v -> { - assertEquals(Version.fromString("0.20.5"), v); - called.set(true); - }); - assertTrue(called.get()); - called.set(false); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/0_90_13.json").lookupRemoteVersion(v -> { - assertEquals(Version.fromString("0.90.13"), v); - called.set(true); - }); - assertTrue(called.get()); - called.set(false); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/1_7_5.json").lookupRemoteVersion(v -> { - assertEquals(Version.fromString("1.7.5"), v); - called.set(true); - }); - assertTrue(called.get()); - called.set(false); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/2_3_3.json").lookupRemoteVersion(v -> { - assertEquals(Version.fromId(2030399), v); - called.set(true); - }); - assertTrue(called.get()); - called.set(false); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/5_0_0_alpha_3.json").lookupRemoteVersion(v -> { - // assert for V_5_0_0 (no qualifier) since we no longer consider qualifier in Version since 7 - assertEquals(Version.fromId(5000099), v); - called.set(true); - }); - assertTrue(called.get()); - called.set(false); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/with_unknown_fields.json").lookupRemoteVersion(v -> { - // V_5_0_0 since we no longer consider qualifier in Version - assertEquals(Version.fromId(5000099), v); + sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, s).lookupRemoteVersion(v -> { + assertEquals(expected, v); called.set(true); }); assertTrue(called.get()); From ac3e4a6799f8dbe1ea7f381fe6a54acea53dd007 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Wed, 26 Jun 2019 09:36:17 +0200 Subject: [PATCH 008/140] Reindex remove outer level size (#43373) This commit finalizes the work done to rename size to max_docs in reindex and update/delete by query. size is no longer supported in the URL or the outer-level body of these 3 APIs (in update/delete-by-query, size has always been, and still is, interpreted as scroll_size, so it should not be relied upon).
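For illustration, a minimal sketch of a reindex request under the new naming (index names and values here are only examples, not taken from this change):

[source,js]
----
POST _reindex
{
  "max_docs": 10,
  "source": {
    "index": "source",
    "size": 100
  },
  "dest": {
    "index": "dest"
  }
}
----

The outer-level max_docs replaces the old outer-level size and caps the number of documents processed, while the size inside source is unchanged and still controls the scroll/batch size.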
Continuation of #41894 Closes #24344 --- .../client/RequestConvertersTests.java | 26 ++------ .../migration/migrate_8_0/reindex.asciidoc | 16 ++++- .../AbstractBulkByQueryRestHandler.java | 8 +-- .../index/reindex/RoundTripTests.java | 6 +- .../test/delete_by_query/10_basic.yml | 46 ------------- .../test/delete_by_query/20_validation.yml | 37 ----------- .../test/reindex/20_validation.yml | 39 ++++------- .../rest-api-spec/test/reindex/30_search.yml | 46 ------------- .../rest-api-spec/test/reindex/90_remote.yml | 66 ------------------- .../test/update_by_query/10_basic.yml | 33 ---------- .../test/update_by_query/20_validation.yml | 34 ---------- .../rest-api-spec/api/delete_by_query.json | 4 -- .../rest-api-spec/api/update_by_query.json | 4 -- .../reindex/AbstractBulkByScrollRequest.java | 26 -------- .../index/reindex/ReindexRequest.java | 8 ++- .../AbstractBulkByScrollRequestTestCase.java | 6 +- 16 files changed, 45 insertions(+), 360 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 82ce5f6b9b946..a1946baa3e0e1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -427,11 +427,7 @@ public void testReindex() throws IOException { reindexRequest.setDestRouting("=cat"); } if (randomBoolean()) { - if (randomBoolean()) { - reindexRequest.setMaxDocs(randomIntBetween(100, 1000)); - } else { - reindexRequest.setSize(randomIntBetween(100, 1000)); - } + reindexRequest.setMaxDocs(randomIntBetween(100, 1000)); } if (randomBoolean()) { reindexRequest.setAbortOnVersionConflict(false); @@ -479,13 +475,9 @@ public void testUpdateByQuery() throws IOException { expectedParams.put("routing", "=cat"); } if (randomBoolean()) { - int size = randomIntBetween(100, 1000); - if (randomBoolean()) { - updateByQueryRequest.setMaxDocs(size); - } else { - updateByQueryRequest.setSize(size); - } - expectedParams.put("max_docs", Integer.toString(size)); + int maxDocs = randomIntBetween(100, 1000); + updateByQueryRequest.setMaxDocs(maxDocs); + expectedParams.put("max_docs", Integer.toString(maxDocs)); } if (randomBoolean()) { updateByQueryRequest.setAbortOnVersionConflict(false); @@ -528,13 +520,9 @@ public void testDeleteByQuery() throws IOException { expectedParams.put("routing", "=cat"); } if (randomBoolean()) { - int size = randomIntBetween(100, 1000); - if (randomBoolean()) { - deleteByQueryRequest.setMaxDocs(size); - } else { - deleteByQueryRequest.setSize(size); - } - expectedParams.put("max_docs", Integer.toString(size)); + int maxDocs = randomIntBetween(100, 1000); + deleteByQueryRequest.setMaxDocs(maxDocs); + expectedParams.put("max_docs", Integer.toString(maxDocs)); } if (randomBoolean()) { deleteByQueryRequest.setAbortOnVersionConflict(false); diff --git a/docs/reference/migration/migrate_8_0/reindex.asciidoc b/docs/reference/migration/migrate_8_0/reindex.asciidoc index ef4f5aed147ca..912f0f9dbf11b 100644 --- a/docs/reference/migration/migrate_8_0/reindex.asciidoc +++ b/docs/reference/migration/migrate_8_0/reindex.asciidoc @@ -12,4 +12,18 @@ Instead, please specify the index-name without any encoding. 
[float] ==== Removal of types -The `/{index}/{type}/_delete_by_query` and `/{index}/{type}/_update_by_query` REST endpoints have been removed in favour of `/{index}/_delete_by_query` and `/{index}/_update_by_query`, since indexes no longer contain types, these typed endpoints are obsolete. \ No newline at end of file +The `/{index}/{type}/_delete_by_query` and `/{index}/{type}/_update_by_query` REST endpoints have been removed in favour of `/{index}/_delete_by_query` and `/{index}/_update_by_query`. Since indices no longer contain types, these typed endpoints are obsolete. + +[float] +==== Removal of size parameter + +Previously, a `_reindex` request had two different size specifications in the body: + +- Outer level, determining the maximum number of documents to process +- Inside the `source` element, determining the scroll/batch size. + +The outer-level `size` parameter has now been renamed to `max_docs` to +avoid confusion and clarify its semantics. + +Similarly, the `size` parameter has been renamed to `max_docs` for +`_delete_by_query` and `_update_by_query` to keep the three APIs consistent. \ No newline at end of file diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 240ccde350532..fbfd1007a7e02 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -53,7 +52,7 @@ protected void parseInternalRequest(Request internal, RestRequest restRequest, SearchRequest searchRequest = internal.getSearchRequest(); try (XContentParser parser = extractRequestSpecificFields(restRequest, bodyConsumers)) { - RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, size -> setMaxDocsFromSearchSize(internal, size)); + RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, size -> failOnSizeSpecified()); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); @@ -96,8 +95,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest, } } - private void setMaxDocsFromSearchSize(Request request, int size) { - LoggingDeprecationHandler.INSTANCE.usedDeprecatedName("size", "max_docs"); - setMaxDocsValidateIdentical(request, size); + private static void failOnSizeSpecified() { + throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead"); + } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 7b8e46471e61e..d401efaae4bf0 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -107,11 +107,7 @@ private void randomRequest(AbstractBulkByScrollRequest request) {
request.getSearchRequest().indices("test"); request.getSearchRequest().source().size(between(1, 1000)); if (randomBoolean()) { - if (randomBoolean()) { - request.setMaxDocs(between(1, Integer.MAX_VALUE)); - } else { - request.setSize(between(1, Integer.MAX_VALUE)); - } + request.setMaxDocs(between(1, Integer.MAX_VALUE)); } request.setAbortOnVersionConflict(random().nextBoolean()); request.setRefresh(rarely()); diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yml index c8be7eedd3a2b..1763baebe0277 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yml @@ -279,52 +279,6 @@ - match: {count: 1} ---- -"Limit by size": - - skip: - version: " - 7.2.99" - reason: "deprecation warnings only emitted on 7.3+" - features: warnings - - - do: - index: - index: twitter - id: 1 - body: { "user": "kimchy" } - - do: - index: - index: twitter - id: 2 - body: { "user": "kimchy" } - - do: - indices.refresh: {} - - - do: - warnings: - - Deprecated field [size] used, expected [max_docs] instead - delete_by_query: - index: twitter - size: 1 - body: - query: - match_all: {} - - - match: {deleted: 1} - - match: {version_conflicts: 0} - - match: {batches: 1} - - match: {failures: []} - - match: {throttled_millis: 0} - - gte: { took: 0 } - - - do: - indices.refresh: {} - - - do: - count: - index: twitter - - - match: {count: 1} - --- "Limit by size pre 7.3": - skip: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yml index 2b1a9514aef19..45de10b370669 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yml @@ -30,22 +30,6 @@ query: match_all: {} ---- -"invalid size fails": - - do: - index: - index: test - id: 1 - body: { "text": "test" } - - do: - catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ - delete_by_query: - index: test - size: -4 - body: - query: - match_all: {} - --- "invalid max_docs fails": - skip: @@ -66,27 +50,6 @@ query: match_all: {} ---- -"both max_docs and size fails": - - skip: - version: " - 7.2.99" - reason: "max_docs introduced in 7.3.0" - - - do: - index: - index: test - id: 1 - body: { "text": "test" } - - do: - catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ - delete_by_query: - index: test - size: 4 - max_docs: 5 - body: - query: - match_all: {} - --- "invalid scroll_size fails": - do: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml index 2c58f122bfeca..44971c49c358a 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml @@ -95,21 +95,26 @@ conflicts: cat --- -"invalid size fails": +"specifying size fails": + - skip: + version: " - 7.99.99" + reason: "size supported until 8" + - do: index: - index: test - id: 1 - body: { "text": "test" } + index: test + id: 1 + body: { "text": "test" } + - do: - catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ + catch: /invalid parameter 
\[size\], use \[max_docs\] instead/ reindex: body: source: index: test dest: index: dest - size: -4 + size: 1 --- "invalid max_docs in body fails": @@ -153,28 +158,6 @@ dest: index: dest ---- -"inconsistent max_docs and size fails": - - skip: - version: " - 7.2.99" - reason: "max_docs introduced in 7.3.0" - - - do: - index: - index: test - id: 1 - body: { "text": "test" } - - do: - catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ - reindex: - body: - source: - index: test - dest: - index: dest - size: 4 - max_docs: 5 - --- "inconsistent max_docs in body and max_docs in URL fails": - skip: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/30_search.yml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/30_search.yml index 908ab55673c56..709b9c0d17340 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/30_search.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/30_search.yml @@ -31,52 +31,6 @@ index: target - match: { hits.total: 1 } ---- -"Sorting and size combined": - - skip: - version: " - 7.2.99" - reason: "deprecation warnings only emitted on 7.3+" - features: warnings - - - do: - index: - index: test - id: 1 - body: { "order": 1 } - - do: - index: - index: test - id: 2 - body: { "order": 2 } - - do: - indices.refresh: {} - - - do: - warnings: - - Deprecated field [size] used, expected [max_docs] instead - reindex: - refresh: true - body: - size: 1 - source: - index: test - sort: order - dest: - index: target - - - do: - search: - rest_total_hits_as_int: true - index: target - - match: { hits.total: 1 } - - - do: - search: - rest_total_hits_as_int: true - index: target - q: order:1 - - match: { hits.total: 1 } - --- "Sorting and size combined pre 7.3": - skip: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yml index dccf58a51b1ae..8354fc0aaf322 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yml @@ -217,72 +217,6 @@ metric: search - match: {indices.source.total.search.open_contexts: 0} ---- -"Reindex from remote with size": - - skip: - version: "7.3.0 - " - reason: "7.3 should use max_docs or get deprecation warning" - - - do: - index: - index: source - id: 1 - body: { "text": "test" } - refresh: true - - do: - index: - index: source - id: 2 - body: { "text": "test" } - refresh: true - - # Fetch the http host. We use the host of the master because we know there will always be a master. 
- - do: - cluster.state: {} - - set: { master_node: master } - - do: - nodes.info: - metric: [ http ] - - is_true: nodes.$master.http.publish_address - - set: {nodes.$master.http.publish_address: host} - - do: - reindex: - refresh: true - body: - size: 1 - source: - remote: - host: http://${host} - index: source - dest: - index: dest - - match: {created: 1} - - match: {updated: 0} - - match: {version_conflicts: 0} - - match: {batches: 1} - - match: {failures: []} - - match: {throttled_millis: 0} - - gte: { took: 0 } - - is_false: task - - is_false: deleted - - - do: - search: - rest_total_hits_as_int: true - index: dest - body: - query: - match: - text: test - - match: {hits.total: 1} - - # Make sure reindex closed all the scroll contexts - - do: - indices.stats: - index: source - metric: search - - match: {indices.source.total.search.open_contexts: 0} - --- "Reindex from remote with max_docs": - skip: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yml index 67ee48d414c1b..2a3696a4005c7 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yml @@ -217,39 +217,6 @@ - match: {failures: []} - gte: { took: 0 } ---- -"Limit by size": - - skip: - version: " - 7.2.99" - reason: "deprecation warnings only emitted on 7.3+" - features: warnings - - - do: - index: - index: twitter - id: 1 - body: { "user": "kimchy" } - - do: - index: - index: twitter - id: 2 - body: { "user": "kimchy" } - - do: - indices.refresh: {} - - - do: - warnings: - - Deprecated field [size] used, expected [max_docs] instead - update_by_query: - index: twitter - size: 1 - - match: {updated: 1} - - match: {version_conflicts: 0} - - match: {batches: 1} - - match: {failures: []} - - match: {throttled_millis: 0} - - gte: { took: 0 } - --- "Limit by size pre 7.3": - skip: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml index ddc09fe9d9a61..21644b3932984 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml @@ -11,19 +11,6 @@ index: test conflicts: cat ---- -"invalid size fails": - - do: - index: - index: test - id: 1 - body: { "text": "test" } - - do: - catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ - update_by_query: - index: test - size: -4 - --- "invalid max_docs in URL fails": - skip: @@ -59,27 +46,6 @@ body: max_docs: -4 ---- -"inconsistent max_docs and size fails": - - skip: - version: " - 7.2.99" - reason: "max_docs introduced in 7.3.0" - - - do: - index: - index: test - id: 1 - body: { "text": "test" } - - do: - catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ - delete_by_query: - index: test - size: 4 - max_docs: 5 - body: - query: - match_all: {} - --- "inconsistent max_docs in body and max_docs in URL fails": - skip: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 38a8a13fba9fc..d4aadea8b3d80 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -86,10 +86,6 @@ "type" : "time", "description" : "Explicit timeout for each search request. Defaults to no timeout." }, - "size": { - "type" : "number", - "description" : "Deprecated, please use `max_docs` instead" - }, "max_docs": { "type" : "number", "description" : "Maximum number of documents to process (default: all documents)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 69d36f44140a3..4048b4a55d962 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -90,10 +90,6 @@ "type" : "time", "description" : "Explicit timeout for each search request. Defaults to no timeout." }, - "size": { - "type" : "number", - "description" : "Deprecated, please use `max_docs` instead" - }, "max_docs": { "type" : "number", "description" : "Maximum number of documents to process (default: all documents)" diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index c51934c9c04f1..0f27fb4753e9b 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -43,11 +43,6 @@ public abstract class AbstractBulkByScrollRequest> extends ActionRequest { public static final int MAX_DOCS_ALL_MATCHES = -1; - /** - * @deprecated please use MAX_DOCS_ALL_MATCHES instead. - */ - @Deprecated - public static final int SIZE_ALL_MATCHES = MAX_DOCS_ALL_MATCHES; public static final TimeValue DEFAULT_SCROLL_TIMEOUT = timeValueMinutes(5); public static final int DEFAULT_SCROLL_SIZE = 1000; @@ -168,27 +163,6 @@ public ActionRequestValidationException validate() { return e; } - /** - * Maximum number of processed documents. Defaults to -1 meaning process all - * documents. - * @deprecated please use getMaxDocs() instead. - */ - @Deprecated - public int getSize() { - return getMaxDocs(); - } - - /** - * Maximum number of processed documents. Defaults to -1 meaning process all - * documents. - * - * @deprecated please use setMaxDocs(int) instead. - */ - @Deprecated - public Self setSize(int size) { - return setMaxDocs(size); - } - /** * Maximum number of processed documents. Defaults to -1 meaning process all * documents. diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 0e2cb84619885..bce848998159c 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -376,7 +376,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws PARSER.declareField(sourceParser::parse, new ParseField("source"), ObjectParser.ValueType.OBJECT); PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ObjectParser.ValueType.OBJECT); - PARSER.declareInt(ReindexRequest::setMaxDocsValidateIdentical, new ParseField("max_docs", "size")); + PARSER.declareInt(ReindexRequest::setMaxDocsValidateIdentical, new ParseField("max_docs")); + // avoid silently accepting an ignored size. 
+ PARSER.declareInt((r,s) -> failOnSizeSpecified(), new ParseField("size")); PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p)), new ParseField("script"), ObjectParser.ValueType.OBJECT); PARSER.declareString(ReindexRequest::setConflicts, new ParseField("conflicts")); @@ -492,4 +494,8 @@ static void setMaxDocsValidateIdentical(AbstractBulkByScrollRequest request, request.setMaxDocs(maxDocs); } } + + private static void failOnSizeSpecified() { + throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead"); + } } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestTestCase.java b/server/src/test/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestTestCase.java index 480bd5007b83d..dc7423c68596c 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestTestCase.java @@ -47,11 +47,7 @@ public void testForSlice() { original.setRequestsPerSecond( randomBoolean() ? Float.POSITIVE_INFINITY : randomValueOtherThanMany(r -> r < 0, ESTestCase::randomFloat)); if (randomBoolean()) { - if (randomBoolean()) { - original.setMaxDocs(between(0, Integer.MAX_VALUE)); - } else { - original.setSize(between(0, Integer.MAX_VALUE)); - } + original.setMaxDocs(between(0, Integer.MAX_VALUE)); } // it's not important how many slices there are, we just need a number for forSlice From 08a3549a1eab3dae3b3994ee09c82fa671b4f1e8 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 26 Jun 2019 10:03:16 +0200 Subject: [PATCH 009/140] Upgrade jcodings dependency to 1.0.44 (#43334) --- libs/grok/build.gradle | 2 +- libs/grok/licenses/jcodings-1.0.12.jar.sha1 | 1 - libs/grok/licenses/jcodings-1.0.44.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 libs/grok/licenses/jcodings-1.0.12.jar.sha1 create mode 100644 libs/grok/licenses/jcodings-1.0.44.jar.sha1 diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index ca3634805195d..b7ae54e54c3af 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -20,7 +20,7 @@ dependencies { compile 'org.jruby.joni:joni:2.1.6' // joni dependencies: - compile 'org.jruby.jcodings:jcodings:1.0.12' + compile 'org.jruby.jcodings:jcodings:1.0.44' if (isEclipse == false || project.path == ":libs:elasticsearch-grok-tests") { testCompile(project(":test:framework")) { diff --git a/libs/grok/licenses/jcodings-1.0.12.jar.sha1 b/libs/grok/licenses/jcodings-1.0.12.jar.sha1 deleted file mode 100644 index b097e32ece493..0000000000000 --- a/libs/grok/licenses/jcodings-1.0.12.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6bc17079fcaa8823ea8cd0d4c66516335b558db8 \ No newline at end of file diff --git a/libs/grok/licenses/jcodings-1.0.44.jar.sha1 b/libs/grok/licenses/jcodings-1.0.44.jar.sha1 new file mode 100644 index 0000000000000..4449009d3395e --- /dev/null +++ b/libs/grok/licenses/jcodings-1.0.44.jar.sha1 @@ -0,0 +1 @@ +a6884b2fd8fd9a56874db05afaa22435043a2e3e \ No newline at end of file From 7c3720ab166fbb8cce71d16d6cb159d1a8c9c63f Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 26 Jun 2019 10:12:21 +0100 Subject: [PATCH 010/140] Mute DiskDisruptionIT#testGlobalCheckpointIsSafe Relates to #43626 --- .../test/java/org/elasticsearch/discovery/DiskDisruptionIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java 
b/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java index dac48d89e78b3..4ac9fb5d32e48 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java @@ -98,6 +98,7 @@ public FileChannel newFileChannel(Path path, Set options, * It simulates a full power outage by preventing translog checkpoint files to be written and restart the cluster. This means that * all un-fsynced data will be lost. */ + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/43626") public void testGlobalCheckpointIsSafe() throws Exception { startCluster(rarely() ? 5 : 3); From fc5ce18ae611744ea8c19bf88bd1d03f748672ac Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 26 Jun 2019 11:46:25 +0100 Subject: [PATCH 011/140] Mute VotingOnlyNodeCoordinatorTests#testDoesNotElectVotingOnlyMasterNode Relates to #43631 --- .../cluster/coordination/VotingOnlyNodeCoordinatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java index 2c8d44f63175d..5048232bd4ecf 100644 --- a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java +++ b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java @@ -19,6 +19,7 @@ public class VotingOnlyNodeCoordinatorTests extends AbstractCoordinatorTestCase { + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/43631") public void testDoesNotElectVotingOnlyMasterNode() { final Cluster cluster = new Cluster(randomIntBetween(1, 5), false, Settings.EMPTY); cluster.runRandomly(); From 3884cfb201ea0fcc5eeacbe8392e528696670639 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 26 Jun 2019 11:52:06 +0100 Subject: [PATCH 012/140] [TEST] Wait for replicas before stopping nodes in ML distributed test (#43622) If we stop a node before replicas exist then the test can fail, because we can lose a whole index when the stopped node holds the primary. --- .../xpack/ml/integration/MlDistributedFailureIT.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index b06b83dc3d144..297e45cd3caaa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -146,7 +146,9 @@ public void testCloseUnassignedJobAndDatafeed() throws Exception { waitForDatafeed(jobId, numDocs1); // stop the only ML node + ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index internalCluster().stopRandomNonMasterNode(); + ensureStableCluster(1); // Job state is opened but the job is not assigned to a node (because we just killed the only ML node) GetJobsStatsAction.Request jobStatsRequest = new GetJobsStatsAction.Request(jobId); @@ -198,7 +200,7 @@ public void testJobRelocationIsMemoryAware() throws Exception { // Wait for the cluster to be green - this means the indices have been replicated.
- ensureGreen(".ml-config", ".ml-anomalies-shared", ".ml-notifications"); + ensureGreen(); // Open a big job. This should go on a different node to the 4 small ones. From e7373611a6acf190cf85cf880dbc59e1ab590f4f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 26 Jun 2019 06:53:32 -0500 Subject: [PATCH 013/140] [ML][Data Frame] improve pivot nested field validations (#43548) * [ML][Data Frame] improve pivot nested field validations * addressing pr comments --- .../PreviewDataFrameTransformAction.java | 11 ++++- .../test/data_frame/preview_transforms.yml | 49 ++++++++++++++----- 2 files changed, 46 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java index 7a2e05798908c..6108136a87b7f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java @@ -33,6 +33,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public class PreviewDataFrameTransformAction extends Action { public static final PreviewDataFrameTransformAction INSTANCE = new PreviewDataFrameTransformAction(); @@ -94,9 +96,16 @@ public static Request fromXContent(final XContentParser parser) throws IOExcepti @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + if(config.getPivotConfig() != null) { + for(String failure : config.getPivotConfig().aggFieldValidation()) { + validationException = addValidationError(failure, validationException); + } + } + return validationException; } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return this.config.toXContent(builder, params); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml index 98ef4039eafe4..44a8225ef245a 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml @@ -75,7 +75,7 @@ setup: "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}, - "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-dd HH"}}}, + "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": { "avg_response": {"avg": {"field": "responsetime"}}, "time.max": {"max": {"field": "time"}}, @@ -84,17 +84,17 @@ setup: } } - match: { preview.0.airline: foo } - - match: { preview.0.by-hour: "2017-02-18 00" } + - match: { preview.0.by-hour: 1487376000000 } - match: { preview.0.avg_response: 1.0 } - match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" } - match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" } - match: { preview.1.airline: bar } - - match: { preview.1.by-hour: "2017-02-18 01" } + - match: { preview.1.by-hour: 1487379600000 } - match: { preview.1.avg_response: 42.0 } - match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" } - match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" } - match: { preview.2.airline: foo } 
- - match: { preview.2.by-hour: "2017-02-18 01" } + - match: { preview.2.by-hour: 1487379600000 } - match: { preview.2.avg_response: 42.0 } - match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" } - match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" } @@ -123,22 +123,22 @@ setup: "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}, - "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-dd HH"}}}, + "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": { "avg_response": {"avg": {"field": "responsetime"}} } } } - match: { preview.0.airline: foo } - - match: { preview.0.by-hour: "2017-02-18 00" } + - match: { preview.0.by-hour: 1487376000000 } - match: { preview.0.avg_response: 1.0 } - match: { preview.0.my_field: 42 } - match: { preview.1.airline: bar } - - match: { preview.1.by-hour: "2017-02-18 01" } + - match: { preview.1.by-hour: 1487379600000 } - match: { preview.1.avg_response: 42.0 } - match: { preview.1.my_field: 42 } - match: { preview.2.airline: foo } - - match: { preview.2.by-hour: "2017-02-18 01" } + - match: { preview.2.by-hour: 1487379600000 } - match: { preview.2.avg_response: 42.0 } - match: { preview.2.my_field: 42 } @@ -166,7 +166,7 @@ setup: "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}, - "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-dd HH"}}}, + "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} } } @@ -180,7 +180,7 @@ setup: "source": { "index": "airline-data" }, "pivot": { "group_by": { - "time": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, + "time": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": { "avg_response": {"avg": {"field": "responsetime"}}, "time.min": {"min": {"field": "time"}} @@ -189,20 +189,43 @@ setup: } - do: - catch: /mixed object types of nested and non-nested fields \[time.min\]/ + catch: /field \[time\] cannot be both an object and a field/ data_frame.preview_data_frame_transform: body: > { "source": { "index": "airline-data" }, "pivot": { "group_by": { - "time": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, + "time": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": { "avg_response": {"avg": {"field": "responsetime"}}, "time.min": {"min": {"field": "time"}} } } } + - do: + catch: /field \[super_metric\] cannot be both an object and a field/ + data_frame.preview_data_frame_transform: + body: > + { + "source": { "index": "airline-data" }, + "pivot": { + "group_by": { + "time": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, + "aggs": { + "avg_response": {"avg": {"field": "responsetime"}}, + "super_metric.time": {"min": {"field": "time"}}, + "super_metric": { + "scripted_metric": { + "init_script": "", + "map_script": "", + "combine_script": "", + "reduce_script": "return ['value1': 1, 'value2':2]" + } + } + } + } + } --- "Test preview with missing pipeline": - do: @@ -214,7 +237,7 @@ setup: "dest": { "pipeline": "missing-pipeline" }, "pivot": { "group_by": { - "time": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, + "time": {"date_histogram": {"fixed_interval": "1h", "field": "time"}}}, "aggs": { "avg_response": {"avg": {"field": "responsetime"}}, "time.min": {"min": {"field": "time"}} From 
90844189370af18ce5de0b6f8934b9633031ae57 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 26 Jun 2019 06:55:50 -0500 Subject: [PATCH 014/140] [ML][Data Frame] Add support for allow_no_match for endpoints (#43490) * [ML][Data Frame] Add support for allow_no_match parameter in endpoints Adds support for: * Get Transforms * Get Transforms stats * stop transforms --- .../client/DataFrameRequestConverters.java | 40 ++++++++++++------- .../GetDataFrameTransformRequest.java | 16 +++++++- .../GetDataFrameTransformStatsRequest.java | 15 ++++++- .../StopDataFrameTransformRequest.java | 14 ++++++- .../DataFrameRequestConvertersTests.java | 17 +++++++- .../DataFrameTransformDocumentationIT.java | 7 ++++ .../dataframe/get_data_frame.asciidoc | 1 + .../dataframe/get_data_frame_stats.asciidoc | 13 ++++++ .../dataframe/stop_data_frame.asciidoc | 1 + .../apis/get-transform-stats.asciidoc | 6 +++ .../data-frames/apis/get-transform.asciidoc | 6 +++ .../data-frames/apis/stop-transform.asciidoc | 6 ++- .../xpack/core/dataframe/DataFrameField.java | 1 + .../GetDataFrameTransformsStatsAction.java | 21 +++++++++- .../action/StopDataFrameTransformAction.java | 22 ++++++++-- ...pDataFrameTransformActionRequestTests.java | 11 ++--- ...portGetDataFrameTransformsStatsAction.java | 6 ++- ...TransportStopDataFrameTransformAction.java | 6 ++- .../DataFrameTransformsConfigManager.java | 3 +- .../RestGetDataFrameTransformsAction.java | 3 ++ ...RestGetDataFrameTransformsStatsAction.java | 3 ++ .../RestStopDataFrameTransformAction.java | 8 +++- ...DataFrameTransformsConfigManagerTests.java | 21 ++++++++++ .../data_frame.get_data_frame_transform.json | 5 +++ ..._frame.get_data_frame_transform_stats.json | 5 +++ .../data_frame.stop_data_frame_transform.json | 5 +++ .../test/data_frame/transforms_crud.yml | 6 +++ .../test/data_frame/transforms_start_stop.yml | 13 ++++++ 28 files changed, 242 insertions(+), 39 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java index 00d2651a1aeb8..18dfc2305575d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java @@ -37,6 +37,7 @@ import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.RequestConverters.createEntity; +import static org.elasticsearch.client.dataframe.GetDataFrameTransformRequest.ALLOW_NO_MATCH; final class DataFrameRequestConverters { @@ -64,6 +65,9 @@ static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) { if (getRequest.getPageParams() != null && getRequest.getPageParams().getSize() != null) { request.addParameter(PageParams.SIZE.getPreferredName(), getRequest.getPageParams().getSize().toString()); } + if (getRequest.getAllowNoMatch() != null) { + request.addParameter(ALLOW_NO_MATCH, getRequest.getAllowNoMatch().toString()); + } return request; } @@ -91,21 +95,24 @@ static Request startDataFrameTransform(StartDataFrameTransformRequest startReque } static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) { - String endpoint = new RequestConverters.EndpointBuilder() - .addPathPartAsIs("_data_frame", "transforms") - .addPathPart(stopRequest.getId()) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, 
endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (stopRequest.getWaitForCompletion() != null) { - params.withWaitForCompletion(stopRequest.getWaitForCompletion()); - } - if (stopRequest.getTimeout() != null) { - params.withTimeout(stopRequest.getTimeout()); - } - request.addParameters(params.asMap()); - return request; + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_data_frame", "transforms") + .addPathPart(stopRequest.getId()) + .addPathPartAsIs("_stop") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(); + if (stopRequest.getWaitForCompletion() != null) { + params.withWaitForCompletion(stopRequest.getWaitForCompletion()); + } + if (stopRequest.getTimeout() != null) { + params.withTimeout(stopRequest.getTimeout()); + } + if (stopRequest.getAllowNoMatch() != null) { + request.addParameter(ALLOW_NO_MATCH, stopRequest.getAllowNoMatch().toString()); + } + request.addParameters(params.asMap()); + return request; } static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException { @@ -130,6 +137,9 @@ static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest stat if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getSize() != null) { request.addParameter(PageParams.SIZE.getPreferredName(), statsRequest.getPageParams().getSize().toString()); } + if (statsRequest.getAllowNoMatch() != null) { + request.addParameter(ALLOW_NO_MATCH, statsRequest.getAllowNoMatch().toString()); + } return request; } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java index c50f37a27c885..cc69e0bd4cd4e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java @@ -30,6 +30,7 @@ public class GetDataFrameTransformRequest implements Validatable { + public static final String ALLOW_NO_MATCH = "allow_no_match"; /** * Helper method to create a request that will get ALL Data Frame Transforms * @return new {@link GetDataFrameTransformRequest} object for the id "_all" @@ -40,6 +41,7 @@ public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() { private final List ids; private PageParams pageParams; + private Boolean allowNoMatch; public GetDataFrameTransformRequest(String... 
ids) { this.ids = Arrays.asList(ids); @@ -57,6 +59,14 @@ public void setPageParams(PageParams pageParams) { this.pageParams = pageParams; } + public Boolean getAllowNoMatch() { + return allowNoMatch; + } + + public void setAllowNoMatch(Boolean allowNoMatch) { + this.allowNoMatch = allowNoMatch; + } + @Override public Optional validate() { if (ids == null || ids.isEmpty()) { @@ -70,7 +80,7 @@ public Optional validate() { @Override public int hashCode() { - return Objects.hash(ids, pageParams); + return Objects.hash(ids, pageParams, allowNoMatch); } @Override @@ -83,6 +93,8 @@ public boolean equals(Object obj) { return false; } GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj; - return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams); + return Objects.equals(ids, other.ids) + && Objects.equals(pageParams, other.pageParams) + && Objects.equals(allowNoMatch, other.allowNoMatch); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java index 4a105f7b40c7e..7522ae0d67c26 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java @@ -29,6 +29,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable { private final String id; private PageParams pageParams; + private Boolean allowNoMatch; public GetDataFrameTransformStatsRequest(String id) { this.id = id; @@ -46,6 +47,14 @@ public void setPageParams(PageParams pageParams) { this.pageParams = pageParams; } + public Boolean getAllowNoMatch() { + return allowNoMatch; + } + + public void setAllowNoMatch(Boolean allowNoMatch) { + this.allowNoMatch = allowNoMatch; + } + @Override public Optional validate() { if (id == null) { @@ -59,7 +68,7 @@ public Optional validate() { @Override public int hashCode() { - return Objects.hash(id, pageParams); + return Objects.hash(id, pageParams, allowNoMatch); } @Override @@ -72,6 +81,8 @@ public boolean equals(Object obj) { return false; } GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj; - return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams); + return Objects.equals(id, other.id) + && Objects.equals(pageParams, other.pageParams) + && Objects.equals(allowNoMatch, other.allowNoMatch); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/StopDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/StopDataFrameTransformRequest.java index 0bc690ad79076..4fb6164f2cca9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/StopDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/StopDataFrameTransformRequest.java @@ -31,6 +31,7 @@ public class StopDataFrameTransformRequest implements Validatable { private final String id; private Boolean waitForCompletion; private TimeValue timeout; + private Boolean allowNoMatch; public StopDataFrameTransformRequest(String id) { this.id = id; @@ -64,6 +65,14 @@ public TimeValue getTimeout() { return timeout; } + public Boolean getAllowNoMatch() { + return allowNoMatch; + } + + public void setAllowNoMatch(Boolean allowNoMatch) { + 
this.allowNoMatch = allowNoMatch; + } + @Override public Optional validate() { if (id == null) { @@ -77,7 +86,7 @@ public Optional validate() { @Override public int hashCode() { - return Objects.hash(id, waitForCompletion, timeout); + return Objects.hash(id, waitForCompletion, timeout, allowNoMatch); } @Override @@ -92,7 +101,8 @@ public boolean equals(Object obj) { StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj; return Objects.equals(this.id, other.id) && Objects.equals(this.waitForCompletion, other.waitForCompletion) - && Objects.equals(this.timeout, other.timeout); + && Objects.equals(this.timeout, other.timeout) + && Objects.equals(this.allowNoMatch, other.allowNoMatch); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java index 26a4ade504682..db111904f4704 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java @@ -46,6 +46,7 @@ import java.util.Collections; import java.util.List; +import static org.elasticsearch.client.dataframe.GetDataFrameTransformRequest.ALLOW_NO_MATCH; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -115,7 +116,6 @@ public void testStopDataFrameTransform() { } StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, waitForCompletion, timeValue); - Request request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + stopRequest.getId() + "/_stop")); @@ -133,6 +133,11 @@ public void testStopDataFrameTransform() { } else { assertFalse(request.getParameters().containsKey("timeout")); } + + assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); + stopRequest.setAllowNoMatch(randomBoolean()); + request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest); + assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH))); } public void testPreviewDataFrameTransform() throws IOException { @@ -158,6 +163,7 @@ public void testGetDataFrameTransformStats() { assertFalse(request.getParameters().containsKey("from")); assertFalse(request.getParameters().containsKey("size")); + assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); getStatsRequest.setPageParams(new PageParams(0, null)); request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); @@ -172,6 +178,10 @@ public void testGetDataFrameTransformStats() { getStatsRequest.setPageParams(new PageParams(0, 10)); request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); + + getStatsRequest.setAllowNoMatch(false); + request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + assertThat(request.getParameters(), hasEntry("allow_no_match", "false")); } public void testGetDataFrameTransform() { @@ -183,6 +193,7 @@ public void testGetDataFrameTransform() { assertFalse(request.getParameters().containsKey("from")); assertFalse(request.getParameters().containsKey("size")); + 
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); getRequest.setPageParams(new PageParams(0, null)); request = DataFrameRequestConverters.getDataFrameTransform(getRequest); @@ -197,6 +208,10 @@ public void testGetDataFrameTransform() { getRequest.setPageParams(new PageParams(0, 10)); request = DataFrameRequestConverters.getDataFrameTransform(getRequest); assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); + + getRequest.setAllowNoMatch(false); + request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + assertThat(request.getParameters(), hasEntry("allow_no_match", "false")); } public void testGetDataFrameTransform_givenMulitpleIds() { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java index b3fa85880b465..60d9a91d1c28e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java @@ -263,6 +263,7 @@ public void testStartStop() throws IOException, InterruptedException { // tag::stop-data-frame-transform-request-options request.setWaitForCompletion(Boolean.TRUE); // <1> request.setTimeout(TimeValue.timeValueSeconds(30)); // <2> + request.setAllowNoMatch(true); // <3> // end::stop-data-frame-transform-request-options // tag::stop-data-frame-transform-execute @@ -506,6 +507,11 @@ public void testGetStats() throws IOException, InterruptedException { new GetDataFrameTransformStatsRequest(id); // <1> // end::get-data-frame-transform-stats-request + // tag::get-data-frame-transform-stats-request-options + request.setPageParams(new PageParams(0, 100)); // <1> + request.setAllowNoMatch(true); // <2> + // end::get-data-frame-transform-stats-request-params + { // tag::get-data-frame-transform-stats-execute GetDataFrameTransformStatsResponse response = @@ -597,6 +603,7 @@ public void testGetDataFrameTransform() throws IOException, InterruptedException // tag::get-data-frame-transform-request-options request.setPageParams(new PageParams(0, 100)); // <1> + request.setAllowNoMatch(true); // <2> // end::get-data-frame-transform-request-options // tag::get-data-frame-transform-execute diff --git a/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc b/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc index ec2253b2c25f4..995d9d2c08963 100644 --- a/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc +++ b/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc @@ -32,6 +32,7 @@ include-tagged::{doc-tests-file}[{api}-request-options] <1> The page parameters `from` and `size`. `from` specifies the number of {dataframe-transforms} to skip. `size` specifies the maximum number of {dataframe-transforms} to get. Defaults to `0` and `100` respectively. +<2> Whether to ignore if a wildcard expression matches no transforms. 
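+
+As an illustrative sketch only (the transform id pattern `my-transform*` is an
+assumption, not part of this change), these options combine as:
+
+["source","java"]
+--------------------------------------------------
+GetDataFrameTransformRequest request =
+    new GetDataFrameTransformRequest("my-transform*"); // wildcard expression
+request.setPageParams(new PageParams(0, 100)); // from 0, size 100
+request.setAllowNoMatch(true);                 // don't fail if nothing matches
+--------------------------------------------------
+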
include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc b/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc index cdc6254a4e443..4360157b4a445 100644 --- a/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc +++ b/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc @@ -22,6 +22,19 @@ include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> Constructing a new GET Stats request referencing an existing {dataframe-transform} +==== Optional Arguments + +The following arguments are optional. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request-options] +-------------------------------------------------- +<1> The page parameters `from` and `size`. `from` specifies the number of data frame transform stats to skip. +`size` specifies the maximum number of data frame transform stats to get. +Defaults to `0` and `100` respectively. +<2> Whether to ignore if a wildcard expression matches no transforms. + include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/dataframe/stop_data_frame.asciidoc b/docs/java-rest/high-level/dataframe/stop_data_frame.asciidoc index 9b05687c00875..1de4af5c5d592 100644 --- a/docs/java-rest/high-level/dataframe/stop_data_frame.asciidoc +++ b/docs/java-rest/high-level/dataframe/stop_data_frame.asciidoc @@ -32,6 +32,7 @@ include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- <1> If true wait for the data frame task to stop before responding <2> Controls the amount of time to wait until the {dataframe-job} stops. +<3> Whether to ignore if a wildcard expression matches no transforms. include::../execution.asciidoc[] diff --git a/docs/reference/data-frames/apis/get-transform-stats.asciidoc b/docs/reference/data-frames/apis/get-transform-stats.asciidoc index 4c91c0cf4a6a2..ff7e023d20fed 100644 --- a/docs/reference/data-frames/apis/get-transform-stats.asciidoc +++ b/docs/reference/data-frames/apis/get-transform-stats.asciidoc @@ -36,6 +36,8 @@ Retrieves usage information for {dataframe-transforms}. specify one of these options, the API returns information for all {dataframe-transforms}. +==== Query Parameters + `from`:: (integer) Skips the specified number of {dataframe-transforms}. The default value is `0`. @@ -43,6 +45,10 @@ Retrieves usage information for {dataframe-transforms}. `size`:: (integer) Specifies the maximum number of {dataframe-transforms} to obtain. The default value is `100`. +`allow_no_match`:: + (boolean) Whether to ignore if a wildcard expression matches no data frame transforms. + This includes `_all` string or when no transforms have been specified. The default is `true`. + ==== Results The API returns the following information: diff --git a/docs/reference/data-frames/apis/get-transform.asciidoc b/docs/reference/data-frames/apis/get-transform.asciidoc index c46bd99138e6b..7a9e86cb520ed 100644 --- a/docs/reference/data-frames/apis/get-transform.asciidoc +++ b/docs/reference/data-frames/apis/get-transform.asciidoc @@ -35,6 +35,8 @@ Retrieves configuration information for {dataframe-transforms}. specify one of these options, the API returns information for all {dataframe-transforms}. +==== Query Parameters + `from`:: (integer) Skips the specified number of {dataframe-transforms}. The default value is `0`. 
@@ -42,6 +44,10 @@ Retrieves configuration information for {dataframe-transforms}. `size`:: (integer) Specifies the maximum number of {dataframe-transforms} to obtain. The default value is `100`. +`allow_no_match`:: + (boolean) Whether to ignore if a wildcard expression matches no data frame transforms. + This includes `_all` string or when no transforms have been specified. The default is `true`. + ==== Results The API returns the following information: diff --git a/docs/reference/data-frames/apis/stop-transform.asciidoc b/docs/reference/data-frames/apis/stop-transform.asciidoc index 58bff626f7da6..3620c24a88d3d 100644 --- a/docs/reference/data-frames/apis/stop-transform.asciidoc +++ b/docs/reference/data-frames/apis/stop-transform.asciidoc @@ -45,7 +45,11 @@ All {dataframe-transforms} can be stopped by using `_all` or `*` as the ` getResponseReader() { public static class Request extends BaseTasksRequest { private final String id; private PageParams pageParams = PageParams.defaultParams(); + private boolean allowNoMatch = true; public static final int MAX_SIZE_RETURN = 1000; // used internally to expand the queried id expression @@ -74,6 +75,9 @@ public Request(StreamInput in) throws IOException { id = in.readString(); expandedIds = Collections.unmodifiableList(in.readStringList()); pageParams = new PageParams(in); + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { + allowNoMatch = in.readBoolean(); + } } @Override @@ -103,12 +107,23 @@ public final PageParams getPageParams() { return pageParams; } + public boolean isAllowNoMatch() { + return allowNoMatch; + } + + public void setAllowNoMatch(boolean allowNoMatch) { + this.allowNoMatch = allowNoMatch; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); out.writeStringCollection(expandedIds); pageParams.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + out.writeBoolean(allowNoMatch); + } } @Override @@ -123,7 +138,7 @@ public ActionRequestValidationException validate() { @Override public int hashCode() { - return Objects.hash(id, pageParams); + return Objects.hash(id, pageParams, allowNoMatch); } @Override @@ -135,7 +150,9 @@ public boolean equals(Object obj) { return false; } Request other = (Request) obj; - return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams); + return Objects.equals(id, other.id) + && Objects.equals(pageParams, other.pageParams) + && allowNoMatch == other.allowNoMatch; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java index e170e5e475fd2..0cbe7a45b636f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.dataframe.action; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.TaskOperationFailure; @@ -56,15 +57,17 @@ public static class Request extends BaseTasksRequest { private final String id; private final boolean waitForCompletion; private final boolean force; + private final boolean allowNoMatch; private 
Set<String> expandedIds;

-        public Request(String id, boolean waitForCompletion, boolean force, @Nullable TimeValue timeout) {
+        public Request(String id, boolean waitForCompletion, boolean force, @Nullable TimeValue timeout, boolean allowNoMatch) {
             this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
             this.waitForCompletion = waitForCompletion;
             this.force = force;
             // use the timeout value already present in BaseTasksRequest
             this.setTimeout(timeout == null ? DEFAULT_TIMEOUT : timeout);
+            this.allowNoMatch = allowNoMatch;
         }

         public Request(StreamInput in) throws IOException {
@@ -75,6 +78,11 @@ public Request(StreamInput in) throws IOException {
             if (in.readBoolean()) {
                 expandedIds = new HashSet<>(Arrays.asList(in.readStringArray()));
             }
+            if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
+                this.allowNoMatch = in.readBoolean();
+            } else {
+                this.allowNoMatch = true;
+            }
         }

         public String getId() {
@@ -97,6 +105,10 @@ public void setExpandedIds(Set<String> expandedIds ) {
             this.expandedIds = expandedIds;
         }

+        public boolean isAllowNoMatch() {
+            return allowNoMatch;
+        }
+
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
@@ -108,6 +120,9 @@ public void writeTo(StreamOutput out) throws IOException {
             if (hasExpandedIds) {
                 out.writeStringArray(expandedIds.toArray(new String[0]));
             }
+            if (out.getVersion().onOrAfter(Version.V_7_3_0)) {
+                out.writeBoolean(allowNoMatch);
+            }
         }

         @Override
@@ -118,7 +133,7 @@ public ActionRequestValidationException validate() {
         @Override
         public int hashCode() {
             // the base class does not implement hashCode, therefore we need to hash timeout ourselves
-            return Objects.hash(id, waitForCompletion, force, expandedIds, this.getTimeout());
+            return Objects.hash(id, waitForCompletion, force, expandedIds, this.getTimeout(), allowNoMatch);
         }

         @Override
@@ -140,7 +155,8 @@ public boolean equals(Object obj) {
             return Objects.equals(id, other.id) &&
                 Objects.equals(waitForCompletion, other.waitForCompletion) &&
                 Objects.equals(force, other.force) &&
-                Objects.equals(expandedIds, other.expandedIds);
+                Objects.equals(expandedIds, other.expandedIds) &&
+                allowNoMatch == other.allowNoMatch;
         }

         @Override
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionRequestTests.java
index 81f03902980ac..cce889baa9675 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionRequestTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionRequestTests.java
@@ -24,7 +24,7 @@ public class StopDataFrameTransformActionRequestTests extends AbstractWireSerial

     @Override
     protected Request createTestInstance() {
         TimeValue timeout = randomBoolean() ?
TimeValue.timeValueMinutes(randomIntBetween(1, 10)) : null; - Request request = new Request(randomAlphaOfLengthBetween(1, 10), randomBoolean(), randomBoolean(), timeout); + Request request = new Request(randomAlphaOfLengthBetween(1, 10), randomBoolean(), randomBoolean(), timeout, randomBoolean()); if (randomBoolean()) { request.setExpandedIds(new HashSet<>(Arrays.asList(generateRandomStringArray(5, 6, false)))); } @@ -40,9 +40,10 @@ public void testSameButDifferentTimeout() { String id = randomAlphaOfLengthBetween(1, 10); boolean waitForCompletion = randomBoolean(); boolean force = randomBoolean(); + boolean allowNoMatch = randomBoolean(); - Request r1 = new Request(id, waitForCompletion, force, TimeValue.timeValueSeconds(10)); - Request r2 = new Request(id, waitForCompletion, force, TimeValue.timeValueSeconds(20)); + Request r1 = new Request(id, waitForCompletion, force, TimeValue.timeValueSeconds(10), allowNoMatch); + Request r2 = new Request(id, waitForCompletion, force, TimeValue.timeValueSeconds(20), allowNoMatch); assertNotEquals(r1,r2); assertNotEquals(r1.hashCode(),r2.hashCode()); @@ -55,11 +56,11 @@ public void testMatch() { DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + dataFrameId, TaskId.EMPTY_TASK_ID, Collections.emptyMap()); - Request request = new Request("unrelated", false, false, null); + Request request = new Request("unrelated", false, false, null, false); request.setExpandedIds(Set.of("foo", "bar")); assertFalse(request.match(dataFrameTask)); - Request matchingRequest = new Request(dataFrameId, false, false, null); + Request matchingRequest = new Request(dataFrameId, false, false, null, false); matchingRequest.setExpandedIds(Set.of(dataFrameId)); assertTrue(matchingRequest.match(dataFrameTask)); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java index 3c5678d2d5dda..6b578d040d2fd 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java @@ -105,8 +105,10 @@ protected void taskOperation(Request request, DataFrameTransformTask task, Actio @Override protected void doExecute(Task task, Request request, ActionListener finalListener) { - dataFrameTransformsConfigManager.expandTransformIds(request.getId(), request.getPageParams(), ActionListener.wrap( - hitsAndIds -> { + dataFrameTransformsConfigManager.expandTransformIds(request.getId(), + request.getPageParams(), + request.isAllowNoMatch(), + ActionListener.wrap(hitsAndIds -> { request.setExpandedIds(hitsAndIds.v2()); request.setNodes(DataFrameNodes.dataFrameTaskNodes(hitsAndIds.v2(), clusterService.state())); super.doExecute(task, request, ActionListener.wrap( diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java index c3ebb4e5460b6..35a9d19658345 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java +++ 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java
@@ -84,8 +84,10 @@ protected void doExecute(Task task, StopDataFrameTransformAction.Request request
             finalListener = listener;
         }

-        dataFrameTransformsConfigManager.expandTransformIds(request.getId(), new PageParams(0, 10_000), ActionListener.wrap(
-            hitsAndIds -> {
+        dataFrameTransformsConfigManager.expandTransformIds(request.getId(),
+            new PageParams(0, 10_000),
+            request.isAllowNoMatch(),
+            ActionListener.wrap(hitsAndIds -> {
                 request.setExpandedIds(new HashSet<>(hitsAndIds.v2()));
                 request.setNodes(DataFrameNodes.dataFrameTaskNodes(hitsAndIds.v2(), clusterService.state()));
                 super.doExecute(task, request, finalListener);
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
index 63184fefef861..bce929e23a7b2 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
@@ -199,6 +199,7 @@ public void getTransformConfiguration(String transformId, ActionListener<DataFrameTransformConfig>
     public void expandTransformIds(String transformIdsExpression,
                                    PageParams pageParams,
+                                   boolean allowNoMatch,
                                    ActionListener<Tuple<Long, List<String>>> foundIdsListener) {
         String[] idTokens = ExpandedIdsMatcher.tokenizeExpression(transformIdsExpression);
         QueryBuilder queryBuilder = buildQueryFromTokenizedIds(idTokens, DataFrameTransformConfig.NAME);
@@ -213,7 +214,7 @@ public void expandTransformIds(String transformIdsExpression,
             .setFetchSource(DataFrameField.ID.getPreferredName(), "")
             .request();

-        final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, true);
+        final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch);

         executeAsyncWithOrigin(client.threadPool().getThreadContext(), DATA_FRAME_ORIGIN, request, ActionListener.wrap(
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java
index d079b2fd72cc9..95f6ec79fb18d 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java
@@ -16,6 +16,8 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsAction;

+import static org.elasticsearch.xpack.core.dataframe.DataFrameField.ALLOW_NO_MATCH;
+
 public class RestGetDataFrameTransformsAction extends BaseRestHandler {

     public RestGetDataFrameTransformsAction(Settings settings, RestController controller) {
@@ -30,6 +32,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         String id = restRequest.param(DataFrameField.ID.getPreferredName());

         request.setResourceId(id);
+        request.setAllowNoResources(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), true));
         if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) {
             request.setPageParams(
                 new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(),
PageParams.DEFAULT_FROM), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java index f2d14f8106958..f6d8dd40e5c6d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction; +import static org.elasticsearch.xpack.core.dataframe.DataFrameField.ALLOW_NO_MATCH; + public class RestGetDataFrameTransformsStatsAction extends BaseRestHandler { public RestGetDataFrameTransformsStatsAction(Settings settings, RestController controller) { @@ -27,6 +29,7 @@ public RestGetDataFrameTransformsStatsAction(Settings settings, RestController c protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(DataFrameField.ID.getPreferredName()); GetDataFrameTransformsStatsAction.Request request = new GetDataFrameTransformsStatsAction.Request(id); + request.setAllowNoMatch(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), true)); if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { request.setPageParams( new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java index d34478b9ba941..e06cd7df45377 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java @@ -30,8 +30,14 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient StopDataFrameTransformAction.DEFAULT_TIMEOUT); boolean waitForCompletion = restRequest.paramAsBoolean(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), false); boolean force = restRequest.paramAsBoolean(DataFrameField.FORCE.getPreferredName(), false); + boolean allowNoMatch = restRequest.paramAsBoolean(DataFrameField.ALLOW_NO_MATCH.getPreferredName(), false); - StopDataFrameTransformAction.Request request = new StopDataFrameTransformAction.Request(id, waitForCompletion, force, timeout); + + StopDataFrameTransformAction.Request request = new StopDataFrameTransformAction.Request(id, + waitForCompletion, + force, + timeout, + allowNoMatch); return channel -> client.execute(StopDataFrameTransformAction.INSTANCE, request, new BaseTasksResponseToXContentListener<>(channel)); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java index 9b03e4502d155..eef5ce099b5f7 100644 --- 
a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java @@ -159,6 +159,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds(transformConfig1.getId(), PageParams.defaultParams(), + true, listener), new Tuple<>(1L, Collections.singletonList("transform1_expand")), null, @@ -168,6 +169,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("transform1_expand,transform2_expand", PageParams.defaultParams(), + true, listener), new Tuple<>(2L, Arrays.asList("transform1_expand", "transform2_expand")), null, @@ -177,6 +179,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("transform1*,transform2_expand,transform3_expand", PageParams.defaultParams(), + true, listener), new Tuple<>(3L, Arrays.asList("transform1_expand", "transform2_expand", "transform3_expand")), null, @@ -186,6 +189,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("_all", PageParams.defaultParams(), + true, listener), new Tuple<>(3L, Arrays.asList("transform1_expand", "transform2_expand", "transform3_expand")), null, @@ -195,6 +199,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("_all", new PageParams(0, 1), + true, listener), new Tuple<>(3L, Collections.singletonList("transform1_expand")), null, @@ -204,6 +209,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("_all", new PageParams(1, 2), + true, listener), new Tuple<>(3L, Arrays.asList("transform2_expand", "transform3_expand")), null, @@ -213,6 +219,7 @@ public void testExpandIds() throws Exception { assertAsync(listener -> transformsConfigManager.expandTransformIds("unknown,unknown2", new PageParams(1, 2), + true, listener), (Tuple>)null, null, @@ -222,6 +229,20 @@ public void testExpandIds() throws Exception { equalTo(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "unknown,unknown2"))); }); + // expand 1 id implicitly that does not exist + assertAsync(listener -> + transformsConfigManager.expandTransformIds("unknown*", + new PageParams(1, 2), + false, + listener), + (Tuple>)null, + null, + e -> { + assertThat(e, instanceOf(ResourceNotFoundException.class)); + assertThat(e.getMessage(), + equalTo(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "unknown*"))); + }); + } public void testStateAndStats() throws InterruptedException { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json index 10769a780da55..fc89a524081c8 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json @@ -22,6 +22,11 @@ "type": "int", "required": false, "description": "specifies a max number of transforms to get, defaults to 100" + }, + "allow_no_match": { + "type": "boolean", + "required": false, + "description": "Whether to ignore if a wildcard 
expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json index cfbe22d703b7f..e04cb4d0b0637 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json @@ -22,6 +22,11 @@ "type": "number", "required": false, "description": "specifies a max number of transform stats to get, defaults to 100" + }, + "allow_no_match": { + "type": "boolean", + "required": false, + "description": "Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json index 7c102bdb5a56a..0eb4452bc7091 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json @@ -22,6 +22,11 @@ "type": "time", "required": false, "description": "Controls the time to wait until the transform has stopped. Default to 30 seconds" + }, + "allow_no_match": { + "type": "boolean", + "required": false, + "description": "Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" } } }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml index a017da63312f7..e45dc2adf5e8d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml @@ -22,6 +22,12 @@ setup: - match: { count: 0 } - match: { transforms: [] } + - do: + catch: missing + data_frame.get_data_frame_transform: + transform_id: "*" + allow_no_match: false + --- "Test get transform when it does not exist": - do: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index d156344b5ad6f..9fb9227e05331 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -137,6 +137,19 @@ teardown: data_frame.stop_data_frame_transform: transform_id: "missing-transform" +--- +"Test stop missing transform by expression": + - do: + data_frame.stop_data_frame_transform: + allow_no_match: true + transform_id: "missing-transform*" + + - do: + catch: missing + data_frame.stop_data_frame_transform: + allow_no_match: false + transform_id: "missing-transform*" + --- "Test stop already stopped transform": - do: From 9e2c049830756dcca2dd29c9b6230e2b394d212d Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 26 Jun 2019 13:51:41 +0100 Subject: [PATCH 015/140] Remove unreleased 7.1.2 version constant (#43629) This 
was breaking BWC tests as the presence of the constant implied 7.1.2 was
released

---
 server/src/main/java/org/elasticsearch/Version.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index ec29ddfdd2fa2..daa224c8636ea 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -51,7 +51,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_7_0_1 = new Version(7000199, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_1_0 = new Version(7010099, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_1_1 = new Version(7010199, org.apache.lucene.util.Version.LUCENE_8_0_0);
-    public static final Version V_7_1_2 = new Version(7010299, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_2_0 = new Version(7020099, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_2_1 = new Version(7020199, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_3_0 = new Version(7030099, org.apache.lucene.util.Version.LUCENE_8_1_0);

From a520a5d761b37486441af8aeba04e0080743eb14 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Wed, 26 Jun 2019 15:36:47 +0100
Subject: [PATCH 016/140] Add prefix intervals source (#43635)

This commit adds a prefix intervals source, allowing you to search for
intervals that contain terms starting with a given prefix. The source can
make use of the index_prefixes mapping option.

Relates to #43198
---
 .../query-dsl/intervals-query.asciidoc        |  19 +++
 .../test/search/230_interval_query.yml        |  20 ++++
 .../index/mapper/MappedFieldType.java         |   3 +-
 .../index/mapper/TextFieldMapper.java         |  26 +++-
 .../index/query/IntervalsSourceProvider.java  | 111 +++++++++++++++++-
 .../elasticsearch/search/SearchModule.java    |   2 +
 .../query/IntervalQueryBuilderTests.java      |  36 ++++++
 7 files changed, 212 insertions(+), 5 deletions(-)

diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc
index 3049cb363173a..951147a21ac50 100644
--- a/docs/reference/query-dsl/intervals-query.asciidoc
+++ b/docs/reference/query-dsl/intervals-query.asciidoc
@@ -82,6 +82,25 @@ to search across multiple fields as if they were all the same field; for
 example you could index the same text into stemmed and unstemmed
 fields, and search for stemmed tokens near unstemmed ones.

+[[intervals-prefix]]
+==== `prefix`
+
+The `prefix` rule finds terms that start with a specified prefix. The prefix will
+expand to match at most 128 terms; if there are more matching terms in the index,
+then an error will be returned. To avoid this limit, enable the
+<<index-prefixes,`index_prefixes`>> option on the field being searched.
+
+[horizontal]
+`prefix`::
+Match terms starting with this prefix
+`analyzer`::
+Which analyzer should be used to normalize the `prefix`. By default, the
+search analyzer of the top-level field will be used.
+`use_field`::
+If specified, then match intervals from this field rather than the top-level field.
+The `prefix` will be normalized using the search analyzer from this field, unless
+`analyzer` is specified separately.
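+
+For example, the following search uses `prefix` together with `all_of`,
+mirroring the REST test added in this change (the `text` field name is an
+assumption for illustration). It matches documents containing both the term
+`cold` and a term starting with `out`:
+
+[source,js]
+--------------------------------------------------
+GET _search
+{
+  "query": {
+    "intervals" : {
+      "text" : {
+        "all_of" : {
+          "intervals" : [
+            { "match" : { "query" : "cold" } },
+            { "prefix" : { "prefix" : "out" } }
+          ]
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE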
+ [[intervals-all_of]] ==== `all_of` diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml index 46bf2cada8e4d..c5238e237e580 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml @@ -384,3 +384,23 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "4" } +--- +"Test prefix": + - skip: + version: " - 8.0.0" + reason: "TODO: change to 7.3 in backport" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - prefix: + prefix: out + - match: { hits.total.value: 3 } + diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 5ef689709400d..411045abaf796 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -390,7 +390,8 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew /** * Create an {@link IntervalsSource} to be used for proximity queries */ - public IntervalsSource intervals(String query, int max_gaps, boolean ordered, NamedAnalyzer analyzer) throws IOException { + public IntervalsSource intervals(String query, int max_gaps, boolean ordered, + NamedAnalyzer analyzer, boolean prefix) throws IOException { throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 6906ceb113b9c..05ca08a796593 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.intervals.Intervals; import org.apache.lucene.search.intervals.IntervalsSource; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; @@ -51,6 +52,7 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; @@ -403,6 +405,17 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer .build(); } + public IntervalsSource intervals(BytesRef term) { + if (term.length > maxChars) { + return Intervals.prefix(term.utf8ToString()); + } + if (term.length >= minChars) { + return Intervals.fixField(name(), Intervals.term(term)); + } + String wildcardTerm = term.utf8ToString() + "?".repeat(Math.max(0, minChars - term.length)); + return Intervals.or(Intervals.fixField(name(), Intervals.wildcard(wildcardTerm)), Intervals.term(term)); + } + @Override public PrefixFieldType clone() { return new PrefixFieldType(parentField, name(), minChars, maxChars); @@ -631,10 
+644,21 @@ public Query existsQuery(QueryShardContext context) { } @Override - public IntervalsSource intervals(String text, int maxGaps, boolean ordered, NamedAnalyzer analyzer) throws IOException { + public IntervalsSource intervals(String text, int maxGaps, boolean ordered, + NamedAnalyzer analyzer, boolean prefix) throws IOException { if (indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } + if (analyzer == null) { + analyzer = searchAnalyzer(); + } + if (prefix) { + BytesRef normalizedTerm = analyzer.normalize(name(), text); + if (prefixFieldType != null) { + return prefixFieldType.intervals(normalizedTerm); + } + return Intervals.prefix(normalizedTerm.utf8ToString()); // TODO make Intervals.prefix() take a BytesRef + } IntervalBuilder builder = new IntervalBuilder(name(), analyzer == null ? searchAnalyzer() : analyzer); return builder.analyzeText(text, maxGaps, ordered); } diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index e551654af9a76..234018971ed59 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -78,9 +78,11 @@ public static IntervalsSourceProvider fromXContent(XContentParser parser) throws return Disjunction.fromXContent(parser); case "all_of": return Combine.fromXContent(parser); + case "prefix": + return Prefix.fromXContent(parser); } throw new ParsingException(parser.getTokenLocation(), - "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of]"); + "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix]"); } private static IntervalsSourceProvider parseInnerIntervals(XContentParser parser) throws IOException { @@ -138,10 +140,10 @@ public IntervalsSource getSource(QueryShardContext context, MappedFieldType fiel if (useField != null) { fieldType = context.fieldMapper(useField); assert fieldType != null; - source = Intervals.fixField(useField, fieldType.intervals(query, maxGaps, ordered, analyzer)); + source = Intervals.fixField(useField, fieldType.intervals(query, maxGaps, ordered, analyzer, false)); } else { - source = fieldType.intervals(query, maxGaps, ordered, analyzer); + source = fieldType.intervals(query, maxGaps, ordered, analyzer, false); } if (filter != null) { return filter.filter(source, context, fieldType); @@ -440,6 +442,109 @@ public static Combine fromXContent(XContentParser parser) { } } + public static class Prefix extends IntervalsSourceProvider { + + public static final String NAME = "prefix"; + + private final String term; + private final String analyzer; + private final String useField; + + public Prefix(String term, String analyzer, String useField) { + this.term = term; + this.analyzer = analyzer; + this.useField = useField; + } + + public Prefix(StreamInput in) throws IOException { + this.term = in.readString(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) throws IOException { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getMapperService().getIndexAnalyzers().get(this.analyzer); + } + 
IntervalsSource source; + if (useField != null) { + fieldType = context.fieldMapper(useField); + assert fieldType != null; + source = Intervals.fixField(useField, fieldType.intervals(term, 0, false, analyzer, true)); + } + else { + source = fieldType.intervals(term, 0, false, analyzer, true); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Prefix prefix = (Prefix) o; + return Objects.equals(term, prefix.term) && + Objects.equals(analyzer, prefix.analyzer) && + Objects.equals(useField, prefix.useField); + } + + @Override + public int hashCode() { + return Objects.hash(term, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("term", term); + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String term = (String) args[0]; + String analyzer = (String) args[1]; + String useField = (String) args[2]; + return new Prefix(term, analyzer, useField); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("term")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Prefix fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + } + static class ScriptFilterSource extends FilteredIntervalsSource { final IntervalFilterScript script; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index ade835fb33a84..5eb82854a2097 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -792,6 +792,8 @@ private void registerIntervalsSourceProviders() { IntervalsSourceProvider.Combine.NAME, IntervalsSourceProvider.Combine::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class, IntervalsSourceProvider.Disjunction.NAME, IntervalsSourceProvider.Disjunction::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class, + IntervalsSourceProvider.Prefix.NAME, IntervalsSourceProvider.Prefix::new)); } private void registerQuery(QuerySpec spec) { diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index e858d04e54333..7838f77cc1697 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -59,6 +59,7 @@ protected IntervalQueryBuilder doCreateTestQueryBuilder() { private static final String MASKED_FIELD = 
"masked_field"; private static final String NO_POSITIONS_FIELD = "no_positions_field"; + private static final String PREFIXED_FIELD = "prefixed_field"; @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { @@ -70,6 +71,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .field("type", "text") .field("index_options", "freqs") .endObject() + .startObject(PREFIXED_FIELD) + .field("type", "text") + .startObject("index_prefixes").endObject() + .endObject() .endObject().endObject().endObject(); mapperService.merge("_doc", @@ -385,4 +390,35 @@ public FactoryType compile(Script script, ScriptContext { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json); + builder1.toQuery(createShardContext()); + }); + + String prefix_json = "{ \"intervals\" : { \"" + PREFIXED_FIELD + "\": { " + + "\"prefix\" : { \"term\" : \"term\" } } } }"; + builder = (IntervalQueryBuilder) parseQuery(prefix_json); + expected = new IntervalQuery(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("term"))); + assertEquals(expected, builder.toQuery(createShardContext())); + + String short_prefix_json = "{ \"intervals\" : { \"" + PREFIXED_FIELD + "\": { " + + "\"prefix\" : { \"term\" : \"t\" } } } }"; + builder = (IntervalQueryBuilder) parseQuery(short_prefix_json); + expected = new IntervalQuery(PREFIXED_FIELD, Intervals.or( + Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.wildcard("t?")), + Intervals.term("t"))); + assertEquals(expected, builder.toQuery(createShardContext())); + + } + } From f70dd1be53d34638ee5c1f21a415ae7d95c3a9a5 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 26 Jun 2019 09:50:38 -0500 Subject: [PATCH 017/140] [ML][Data Frame] fixing tag end for df doc tests (#43640) --- .../client/documentation/DataFrameTransformDocumentationIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java index 60d9a91d1c28e..1301a95267691 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java @@ -510,7 +510,7 @@ public void testGetStats() throws IOException, InterruptedException { // tag::get-data-frame-transform-stats-request-options request.setPageParams(new PageParams(0, 100)); // <1> request.setAllowNoMatch(true); // <2> - // end::get-data-frame-transform-stats-request-params + // end::get-data-frame-transform-stats-request-options { // tag::get-data-frame-transform-stats-execute From 1136a8bac8fbb45e5062882c6d046097578b27a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 26 Jun 2019 17:13:12 +0200 Subject: [PATCH 018/140] [ML] Tag destination index with data frame metadata (#43567) --- ...ransportStartDataFrameTransformAction.java | 5 +- .../dataframe/persistence/DataframeIndex.java | 83 ++++++----- .../persistence/DataframeIndexTests.java | 75 ++++++++++ .../dataframe/DataFrameAnalyticsFields.java | 7 + .../ml/dataframe/DataFrameAnalyticsIndex.java | 116 +++++++++++++++ .../dataframe/DataFrameAnalyticsManager.java | 65 +-------- .../DataFrameAnalyticsIndexTests.java | 134 
++++++++++++++++++ 7 files changed, 385 insertions(+), 100 deletions(-) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndexTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index e23e54d67b524..85dc812c5ea49 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot; import java.io.IOException; +import java.time.Clock; import java.util.Collection; import java.util.Map; import java.util.function.Consumer; @@ -226,7 +227,9 @@ private void createDestinationIndex(final DataFrameTransformConfig config, final final Pivot pivot = new Pivot(config.getPivotConfig()); ActionListener> deduceMappingsListener = ActionListener.wrap( - mappings -> DataframeIndex.createDestinationIndex(client, + mappings -> DataframeIndex.createDestinationIndex( + client, + Clock.systemUTC(), config, mappings, ActionListener.wrap(r -> listener.onResponse(null), listener::onFailure)), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index d579ce3c73129..488e9a73b32d9 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.core.dataframe.transforms.pivot.SingleGroupSource; import java.io.IOException; +import java.time.Clock; import java.util.Map; import java.util.Map.Entry; @@ -41,8 +42,11 @@ public final class DataframeIndex { private DataframeIndex() { } - public static void createDestinationIndex(Client client, DataFrameTransformConfig transformConfig, Map mappings, - final ActionListener listener) { + public static void createDestinationIndex(Client client, + Clock clock, + DataFrameTransformConfig transformConfig, + Map mappings, + ActionListener listener) { CreateIndexRequest request = new CreateIndexRequest(transformConfig.getDestination().getIndex()); // TODO: revisit number of shards, number of replicas @@ -50,9 +54,9 @@ public static void createDestinationIndex(Client client, DataFrameTransformConfi .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")); - request.mapping(SINGLE_MAPPING_NAME, createMappingXContent(mappings, - transformConfig.getPivotConfig().getGroupConfig().getGroups(), - transformConfig.getId())); + request.mapping( + SINGLE_MAPPING_NAME, + createMappingXContent(mappings, transformConfig.getPivotConfig().getGroupConfig().getGroups(), transformConfig.getId(), clock)); client.execute(CreateIndexAction.INSTANCE, request, 
ActionListener.wrap(createIndexResponse -> { listener.onResponse(true); @@ -66,29 +70,13 @@ public static void createDestinationIndex(Client client, DataFrameTransformConfi private static XContentBuilder createMappingXContent(Map mappings, Map groupSources, - String id) { + String id, + Clock clock) { try { XContentBuilder builder = jsonBuilder().startObject(); builder.startObject(SINGLE_MAPPING_NAME); - addMetaData(builder, id); - builder.startObject(PROPERTIES); - for (Entry field : mappings.entrySet()) { - String fieldName = field.getKey(); - String fieldType = field.getValue(); - - builder.startObject(fieldName); - builder.field(TYPE, fieldType); - - SingleGroupSource groupSource = groupSources.get(fieldName); - if (groupSource instanceof DateHistogramGroupSource) { - String format = ((DateHistogramGroupSource) groupSource).getFormat(); - if (format != null) { - builder.field(FORMAT, DEFAULT_TIME_FORMAT + "||" + format); - } - } - builder.endObject(); - } - builder.endObject(); // properties + addProperties(builder, mappings, groupSources); + addMetaData(builder, id, clock); builder.endObject(); // _doc type return builder.endObject(); } catch (IOException e) { @@ -96,17 +84,40 @@ private static XContentBuilder createMappingXContent(Map mapping } } - private static XContentBuilder addMetaData(XContentBuilder builder, String id) throws IOException { - builder.startObject(META); - builder.field(DataFrameField.CREATED_BY, DataFrameField.DATA_FRAME_SIGNATURE); - builder.startObject(DataFrameField.META_FIELDNAME); - builder.field(DataFrameField.CREATION_DATE_MILLIS, System.currentTimeMillis()); - builder.startObject(DataFrameField.VERSION); - builder.field(DataFrameField.CREATED, Version.CURRENT); - builder.endObject(); - builder.field(DataFrameField.TRANSFORM, id); - builder.endObject(); // META_FIELDNAME - builder.endObject(); // META + private static XContentBuilder addProperties(XContentBuilder builder, + Map mappings, + Map groupSources) throws IOException { + builder.startObject(PROPERTIES); + for (Entry field : mappings.entrySet()) { + String fieldName = field.getKey(); + String fieldType = field.getValue(); + + builder.startObject(fieldName); + builder.field(TYPE, fieldType); + + SingleGroupSource groupSource = groupSources.get(fieldName); + if (groupSource instanceof DateHistogramGroupSource) { + String format = ((DateHistogramGroupSource) groupSource).getFormat(); + if (format != null) { + builder.field(FORMAT, DEFAULT_TIME_FORMAT + "||" + format); + } + } + builder.endObject(); + } + builder.endObject(); // PROPERTIES return builder; } + + private static XContentBuilder addMetaData(XContentBuilder builder, String id, Clock clock) throws IOException { + return builder.startObject(META) + .field(DataFrameField.CREATED_BY, DataFrameField.DATA_FRAME_SIGNATURE) + .startObject(DataFrameField.META_FIELDNAME) + .field(DataFrameField.CREATION_DATE_MILLIS, clock.millis()) + .startObject(DataFrameField.VERSION) + .field(DataFrameField.CREATED, Version.CURRENT) + .endObject() + .field(DataFrameField.TRANSFORM, id) + .endObject() // META_FIELDNAME + .endObject(); // META + } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndexTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndexTests.java new file mode 100644 index 0000000000000..2ff7756c82f60 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndexTests.java @@ 
-0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.dataframe.persistence; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.time.Clock; +import java.time.Instant; +import java.time.ZoneId; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class DataframeIndexTests extends ESTestCase { + + private static final String TRANSFORM_ID = "some-random-transform-id"; + private static final int CURRENT_TIME_MILLIS = 123456789; + private static final String CREATED_BY = "data-frame-transform"; + + private Client client = mock(Client.class); + private Clock clock = Clock.fixed(Instant.ofEpochMilli(CURRENT_TIME_MILLIS), ZoneId.systemDefault()); + + public void testCreateDestinationIndex() throws IOException { + doAnswer( + invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(null); + return null; + }) + .when(client).execute(any(), any(), any()); + + DataframeIndex.createDestinationIndex( + client, + clock, + DataFrameTransformConfigTests.randomDataFrameTransformConfig(TRANSFORM_ID), + new HashMap<>(), + ActionListener.wrap( + value -> assertTrue(value), + e -> fail(e.getMessage()))); + + ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); + verify(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + verifyNoMoreInteractions(client); + + CreateIndexRequest createIndexRequest = createIndexRequestCaptor.getValue(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, createIndexRequest.mappings().get("_doc"))) { + Map map = parser.map(); + assertThat(extractValue("_doc._meta._data_frame.transform", map), equalTo(TRANSFORM_ID)); + assertThat(extractValue("_doc._meta._data_frame.creation_date_in_millis", map), equalTo(CURRENT_TIME_MILLIS)); + assertThat(extractValue("_doc._meta.created_by", map), equalTo(CREATED_BY)); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java index eeb3a8badce39..4ade30ae68b4e 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java
@@ -9,5 +9,12 @@ public final class DataFrameAnalyticsFields {

     public static final String ID = "_id_copy";

+    // Metadata fields
+    static final String CREATION_DATE_MILLIS = "creation_date_in_millis";
+    static final String VERSION = "version";
+    static final String CREATED = "created";
+    static final String CREATED_BY = "created_by";
+    static final String ANALYTICS = "analytics";
+
     private DataFrameAnalyticsFields() {}
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java
new file mode 100644
index 0000000000000..25a2b04e27c56
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.dataframe;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.index.IndexSortConfig;
+import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.xpack.core.ClientHelper;
+import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
+
+import java.time.Clock;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Supplier;
+
+/**
+ * The {@link DataFrameAnalyticsIndex} class encapsulates the logic for creating the destination index based on the source index metadata.
+ */
+final class DataFrameAnalyticsIndex {
+
+    private static final String PROPERTIES = "properties";
+    private static final String META = "_meta";
+
+    /**
+     * Unfortunately, getting the settings of an index includes internal settings that should
+     * not be set explicitly. There is no way to filter those out. Thus, we have to maintain
+     * a list of them and filter them out manually.
+     */
+    private static final List INTERNAL_SETTINGS = Arrays.asList(
+        "index.creation_date",
+        "index.provided_name",
+        "index.uuid",
+        "index.version.created",
+        "index.version.upgraded"
+    );
+
+    /**
+     * Creates the destination index based on the source index metadata.
+ */ + public static void createDestinationIndex(Client client, + Clock clock, + ClusterState clusterState, + DataFrameAnalyticsConfig analyticsConfig, + ActionListener listener) { + String sourceIndex = analyticsConfig.getSource().getIndex(); + Map headers = analyticsConfig.getHeaders(); + IndexMetaData sourceIndexMetaData = clusterState.getMetaData().getIndices().get(sourceIndex); + if (sourceIndexMetaData == null) { + listener.onFailure(new IndexNotFoundException(sourceIndex)); + return; + } + CreateIndexRequest createIndexRequest = + prepareCreateIndexRequest(sourceIndexMetaData, analyticsConfig.getDest().getIndex(), analyticsConfig.getId(), clock); + ClientHelper.executeWithHeadersAsync( + headers, ClientHelper.ML_ORIGIN, client, CreateIndexAction.INSTANCE, createIndexRequest, listener); + } + + private static CreateIndexRequest prepareCreateIndexRequest(IndexMetaData sourceIndexMetaData, + String destinationIndex, + String analyticsId, + Clock clock) { + // Settings + Settings.Builder settingsBuilder = Settings.builder().put(sourceIndexMetaData.getSettings()); + INTERNAL_SETTINGS.forEach(settingsBuilder::remove); + settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID); + settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC); + Settings settings = settingsBuilder.build(); + + // Mappings + String singleMappingType = sourceIndexMetaData.getMappings().keysIt().next(); + Map mappingsAsMap = sourceIndexMetaData.getMappings().valuesIt().next().sourceAsMap(); + addProperties(mappingsAsMap); + addMetaData(mappingsAsMap, analyticsId, clock); + + return new CreateIndexRequest(destinationIndex, settings).mapping(singleMappingType, mappingsAsMap); + } + + private static void addProperties(Map mappingsAsMap) { + Map properties = getOrPutDefault(mappingsAsMap, PROPERTIES, HashMap::new); + properties.put(DataFrameAnalyticsFields.ID, Map.of("type", "keyword")); + } + + private static void addMetaData(Map mappingsAsMap, String analyticsId, Clock clock) { + Map metadata = getOrPutDefault(mappingsAsMap, META, HashMap::new); + metadata.put(DataFrameAnalyticsFields.CREATION_DATE_MILLIS, clock.millis()); + metadata.put(DataFrameAnalyticsFields.CREATED_BY, "data-frame-analytics"); + metadata.put(DataFrameAnalyticsFields.VERSION, Map.of(DataFrameAnalyticsFields.CREATED, Version.CURRENT)); + metadata.put(DataFrameAnalyticsFields.ANALYTICS, analyticsId); + } + + private static V getOrPutDefault(Map map, K key, Supplier valueSupplier) { + V value = (V) map.get(key); + if (value == null) { + value = valueSupplier.get(); + map.put(key, value); + } + return value; + } + + private DataFrameAnalyticsIndex() {} +} + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 37bded7c3c712..764ca08d735b9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.ml.dataframe; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import 
org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -16,19 +14,13 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.script.Script; -import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; @@ -40,10 +32,7 @@ import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import org.elasticsearch.xpack.ml.dataframe.process.AnalyticsProcessManager; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.time.Clock; import java.util.Objects; import java.util.function.Supplier; @@ -51,19 +40,6 @@ public class DataFrameAnalyticsManager { - /** - * Unfortunately, getting the settings of an index include internal settings that should - * not be set explicitly. There is no way to filter those out. Thus, we have to maintain - * a list of them and filter them out manually. 
- */ - private static final List INTERNAL_SETTINGS = Arrays.asList( - "index.creation_date", - "index.provided_name", - "index.uuid", - "index.version.created", - "index.version.upgraded" - ); - private final ClusterService clusterService; /** * We need a {@link NodeClient} to be get the reindexing task and be able to report progress @@ -184,7 +160,7 @@ private void reindexDataframeAndStartAnalysis(DataFrameAnalyticsTask task, DataF reindexCompletedListener::onFailure ); - createDestinationIndex(config.getSource().getIndex(), config.getDest().getIndex(), config.getHeaders(), copyIndexCreatedListener); + DataFrameAnalyticsIndex.createDestinationIndex(client, Clock.systemUTC(), clusterService.state(), config, copyIndexCreatedListener); } private void startAnalytics(DataFrameAnalyticsTask task, DataFrameAnalyticsConfig config, boolean isTaskRestarting) { @@ -214,43 +190,6 @@ private void startAnalytics(DataFrameAnalyticsTask task, DataFrameAnalyticsConfi DataFrameDataExtractorFactory.create(client, config, isTaskRestarting, dataExtractorFactoryListener); } - private void createDestinationIndex(String sourceIndex, String destinationIndex, Map headers, - ActionListener listener) { - IndexMetaData indexMetaData = clusterService.state().getMetaData().getIndices().get(sourceIndex); - if (indexMetaData == null) { - listener.onFailure(new IndexNotFoundException(sourceIndex)); - return; - } - - Settings.Builder settingsBuilder = Settings.builder().put(indexMetaData.getSettings()); - INTERNAL_SETTINGS.forEach(settingsBuilder::remove); - settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID); - settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC); - - CreateIndexRequest createIndexRequest = new CreateIndexRequest(destinationIndex, settingsBuilder.build()); - addDestinationIndexMappings(indexMetaData, createIndexRequest); - ClientHelper.executeWithHeadersAsync(headers, - ClientHelper.ML_ORIGIN, - client, - CreateIndexAction.INSTANCE, - createIndexRequest, - listener); - } - - private static void addDestinationIndexMappings(IndexMetaData indexMetaData, CreateIndexRequest createIndexRequest) { - ImmutableOpenMap mappings = indexMetaData.getMappings(); - Map mappingsAsMap = mappings.valuesIt().next().sourceAsMap(); - - @SuppressWarnings("unchecked") - Map properties = (Map) mappingsAsMap.get("properties"); - - Map idCopyMapping = new HashMap<>(); - idCopyMapping.put("type", "keyword"); - properties.put(DataFrameAnalyticsFields.ID, idCopyMapping); - - createIndexRequest.mapping(mappings.keysIt().next(), mappingsAsMap); - } - public void stop(DataFrameAnalyticsTask task) { processManager.stop(task); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java new file mode 100644 index 0000000000000..f341622562a2e --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.dataframe; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.time.Clock; +import java.time.Instant; +import java.time.ZoneId; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class DataFrameAnalyticsIndexTests extends ESTestCase { + + private static final String CLUSTER_NAME = "some-cluster-name"; + + private static final String ANALYTICS_ID = "some-analytics-id"; + private static final String SOURCE_INDEX = "source-index"; + private static final String DEST_INDEX = "dest-index"; + private static final DataFrameAnalyticsConfig ANALYTICS_CONFIG = + new DataFrameAnalyticsConfig.Builder(ANALYTICS_ID) + .setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null)) + .setDest(new DataFrameAnalyticsDest(DEST_INDEX, null)) + .setAnalysis(new OutlierDetection()) + .build(); + private static final int CURRENT_TIME_MILLIS = 123456789; + private static final String CREATED_BY = "data-frame-analytics"; + + private ThreadPool threadPool = mock(ThreadPool.class); + private Client client = mock(Client.class); + private Clock clock = Clock.fixed(Instant.ofEpochMilli(123456789L), ZoneId.systemDefault()); + + public void testCreateDestinationIndex() throws IOException { + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + doAnswer( + invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(null); + return null; + }) + .when(client).execute(any(), any(), any()); + + ClusterState clusterState = + ClusterState.builder(new ClusterName(CLUSTER_NAME)) + 
.metaData(MetaData.builder() + .put(IndexMetaData.builder(SOURCE_INDEX) + .settings(Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping(new MappingMetaData("_doc", Map.of("properties", Map.of()))))) + .build(); + DataFrameAnalyticsIndex.createDestinationIndex( + client, + clock, + clusterState, + ANALYTICS_CONFIG, + ActionListener.wrap( + response -> {}, + e -> fail(e.getMessage()))); + + ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); + verify(client, atLeastOnce()).threadPool(); + verify(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + verifyNoMoreInteractions(client); + + CreateIndexRequest createIndexRequest = createIndexRequestCaptor.getValue(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, createIndexRequest.mappings().get("_doc"))) { + Map map = parser.map(); + assertThat(extractValue("_doc.properties._id_copy.type", map), equalTo("keyword")); + assertThat(extractValue("_doc._meta.analytics", map), equalTo(ANALYTICS_ID)); + assertThat(extractValue("_doc._meta.creation_date_in_millis", map), equalTo(CURRENT_TIME_MILLIS)); + assertThat(extractValue("_doc._meta.created_by", map), equalTo(CREATED_BY)); + } + } + + public void testCreateDestinationIndex_IndexNotFound() { + ClusterState clusterState = + ClusterState.builder(new ClusterName(CLUSTER_NAME)) + .metaData(MetaData.builder()) + .build(); + DataFrameAnalyticsIndex.createDestinationIndex( + client, + clock, + clusterState, + ANALYTICS_CONFIG, + ActionListener.wrap( + response -> fail("IndexNotFoundException should be thrown"), + e -> { + assertThat(e, instanceOf(IndexNotFoundException.class)); + IndexNotFoundException infe = (IndexNotFoundException) e; + assertThat(infe.getIndex().getName(), equalTo(SOURCE_INDEX)); + })); + } +} From 018a80bb1a1fbdd05516d5a7988a89f64de631fc Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 26 Jun 2019 17:13:45 +0200 Subject: [PATCH 019/140] Optimize Selector Wakeups (#43515) * Use atomic boolean to guard wakeups * Don't trigger wakeups from the select loops thread itself for registering and closing channels * Don't needlessly queue writes Co-authored-by: Tim Brooks --- .../org/elasticsearch/nio/NioSelector.java | 99 +++++++++++-------- .../nio/SocketChannelContext.java | 10 +- .../elasticsearch/nio/NioSelectorTests.java | 75 +++++++++----- .../nio/SocketChannelContextTests.java | 2 +- .../transport/nio/SSLChannelContext.java | 7 +- .../transport/nio/SSLChannelContextTests.java | 6 +- 6 files changed, 115 insertions(+), 84 deletions(-) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java index 1d0af24ae2cba..fff07f625ff13 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java @@ -61,6 +61,7 @@ public class NioSelector implements Closeable { private final AtomicBoolean isClosed = new AtomicBoolean(false); private final CompletableFuture isRunningFuture = new CompletableFuture<>(); private final AtomicReference thread = new AtomicReference<>(null); + private final AtomicBoolean wokenUp = new AtomicBoolean(false); public NioSelector(EventHandler eventHandler) throws IOException { this(eventHandler, Selector.open()); @@ -153,7 +154,7 @@ void singleLoop() { preSelect(); 
long nanosUntilNextTask = taskScheduler.nanosUntilNextTask(System.nanoTime()); int ready; - if (nanosUntilNextTask == 0) { + if (wokenUp.getAndSet(false) || nanosUntilNextTask == 0) { ready = selector.selectNow(); } else { long millisUntilNextTask = TimeUnit.NANOSECONDS.toMillis(nanosUntilNextTask); @@ -221,13 +222,10 @@ void processKey(SelectionKey selectionKey) { if (selectionKey.isAcceptable()) { assert context instanceof ServerChannelContext : "Only server channels can receive accept events"; ServerChannelContext serverChannelContext = (ServerChannelContext) context; - int ops = selectionKey.readyOps(); - if ((ops & SelectionKey.OP_ACCEPT) != 0) { - try { - eventHandler.acceptChannel(serverChannelContext); - } catch (IOException e) { - eventHandler.acceptException(serverChannelContext, e); - } + try { + eventHandler.acceptChannel(serverChannelContext); + } catch (IOException e) { + eventHandler.acceptException(serverChannelContext, e); } } else { assert context instanceof SocketChannelContext : "Only sockets channels can receive non-accept events"; @@ -279,29 +277,36 @@ private void handleTask(Runnable task) { /** * Queues a write operation to be handled by the event loop. This can be called by any thread and is the - * api available for non-selector threads to schedule writes. + * api available for non-selector threads to schedule writes. When invoked from the selector thread the write will be executed + * right away. * * @param writeOperation to be queued */ public void queueWrite(WriteOperation writeOperation) { - queuedWrites.offer(writeOperation); - if (isOpen() == false) { - boolean wasRemoved = queuedWrites.remove(writeOperation); - if (wasRemoved) { - writeOperation.getListener().accept(null, new ClosedSelectorException()); - } + if (isOnCurrentThread()) { + writeToChannel(writeOperation); } else { - wakeup(); + queuedWrites.offer(writeOperation); + if (isOpen() == false) { + boolean wasRemoved = queuedWrites.remove(writeOperation); + if (wasRemoved) { + writeOperation.getListener().accept(null, new ClosedSelectorException()); + } + } else { + wakeup(); + } } } public void queueChannelClose(NioChannel channel) { ChannelContext context = channel.getContext(); assert context.getSelector() == this : "Must schedule a channel for closure with its selector"; - channelsToClose.offer(context); if (isOnCurrentThread() == false) { + channelsToClose.offer(context); ensureSelectorOpenForEnqueuing(channelsToClose, context); wakeup(); + } else { + closeChannel(context); } } @@ -313,9 +318,13 @@ public void queueChannelClose(NioChannel channel) { */ public void scheduleForRegistration(NioChannel channel) { ChannelContext context = channel.getContext(); - channelsToRegister.add(context); - ensureSelectorOpenForEnqueuing(channelsToRegister, context); - wakeup(); + if (isOnCurrentThread() == false) { + channelsToRegister.add(context); + ensureSelectorOpenForEnqueuing(channelsToRegister, context); + wakeup(); + } else { + registerChannel(context); + } } /** @@ -326,7 +335,7 @@ public void scheduleForRegistration(NioChannel channel) { * * @param writeOperation to be queued in a channel's buffer */ - public void writeToChannel(WriteOperation writeOperation) { + private void writeToChannel(WriteOperation writeOperation) { assertOnSelectorThread(); SocketChannelContext context = writeOperation.getChannel(); // If the channel does not currently have anything that is ready to flush, we should flush after @@ -380,8 +389,10 @@ private void cleanupPendingWrites() { } private void wakeup() { - // TODO: Do 
we need the wakeup optimizations that some other libraries use? - selector.wakeup(); + assert isOnCurrentThread() == false; + if (wokenUp.compareAndSet(false, true)) { + selector.wakeup(); + } } private void handleWrite(SocketChannelContext context) { @@ -414,30 +425,38 @@ private void attemptConnect(SocketChannelContext context, boolean connectEvent) private void setUpNewChannels() { ChannelContext newChannel; while ((newChannel = this.channelsToRegister.poll()) != null) { - assert newChannel.getSelector() == this : "The channel must be registered with the selector with which it was created"; - try { - if (newChannel.isOpen()) { - eventHandler.handleRegistration(newChannel); - if (newChannel instanceof SocketChannelContext) { - attemptConnect((SocketChannelContext) newChannel, false); - } - } else { - eventHandler.registrationException(newChannel, new ClosedChannelException()); + registerChannel(newChannel); + } + } + + private void registerChannel(ChannelContext newChannel) { + assert newChannel.getSelector() == this : "The channel must be registered with the selector with which it was created"; + try { + if (newChannel.isOpen()) { + eventHandler.handleRegistration(newChannel); + if (newChannel instanceof SocketChannelContext) { + attemptConnect((SocketChannelContext) newChannel, false); } - } catch (Exception e) { - eventHandler.registrationException(newChannel, e); + } else { + eventHandler.registrationException(newChannel, new ClosedChannelException()); } + } catch (Exception e) { + eventHandler.registrationException(newChannel, e); } } private void closePendingChannels() { ChannelContext channelContext; while ((channelContext = channelsToClose.poll()) != null) { - try { - eventHandler.handleClose(channelContext); - } catch (Exception e) { - eventHandler.closeException(channelContext, e); - } + closeChannel(channelContext); + } + } + + private void closeChannel(final ChannelContext channelContext) { + try { + eventHandler.handleClose(channelContext); + } catch (Exception e) { + eventHandler.closeException(channelContext, e); } } @@ -470,7 +489,7 @@ private void handleQueuedWrites() { * @param the object type */ private void ensureSelectorOpenForEnqueuing(ConcurrentLinkedQueue queue, O objectAdded) { - if (isOpen() == false && isOnCurrentThread() == false) { + if (isOpen() == false) { if (queue.remove(objectAdded)) { throw new IllegalStateException("selector is already closed"); } diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java index 21de98e096c04..f77ccb17aef39 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java @@ -129,13 +129,7 @@ public void sendMessage(Object message, BiConsumer listener) { WriteOperation writeOperation = readWriteHandler.createWriteOperation(this, message, listener); - NioSelector selector = getSelector(); - if (selector.isOnCurrentThread() == false) { - selector.queueWrite(writeOperation); - return; - } - - selector.writeToChannel(writeOperation); + getSelector().queueWrite(writeOperation); } public void queueWriteOperation(WriteOperation writeOperation) { @@ -269,7 +263,7 @@ protected int readFromChannel(InboundChannelBuffer channelBuffer) throws IOExcep // Currently we limit to 64KB. This is a trade-off which means more syscalls, in exchange for less // copying. 
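To make the copy-versus-syscall trade-off in the comment above concrete, here is a minimal self-contained sketch; WriteLimitSketch, flushSlice, payload and ioBuffer are illustrative names, not the actual SocketChannelContext code, and it assumes the shared io buffer's capacity is at least WRITE_LIMIT:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.WritableByteChannel;

    final class WriteLimitSketch {
        // Mirrors the 64KB cap discussed above.
        private static final int WRITE_LIMIT = 1 << 16;

        // Copies at most WRITE_LIMIT bytes of the payload into the shared io
        // buffer and issues a single write for that slice. A 1MB payload thus
        // takes ~16 copy+write rounds instead of one copy of the full 1MB.
        static int flushSlice(WritableByteChannel channel, ByteBuffer ioBuffer, ByteBuffer payload) throws IOException {
            ioBuffer.clear();
            int sliceLength = Math.min(WRITE_LIMIT, payload.remaining());
            ByteBuffer slice = payload.duplicate();
            slice.limit(slice.position() + sliceLength);
            ioBuffer.put(slice);                                // bounded copy
            payload.position(payload.position() + sliceLength); // consume the slice
            ioBuffer.flip();
            return channel.write(ioBuffer);                     // one syscall per slice
        }
    }
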
- private final int WRITE_LIMIT = 1 << 16; + private static final int WRITE_LIMIT = 1 << 16; protected int flushToChannel(FlushOperation flushOperation) throws IOException { ByteBuffer ioBuffer = getSelector().getIoBuffer(); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java index 7a641315fe285..55d2e645cadee 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.nio; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -108,14 +110,14 @@ public void testQueueChannelForClosed() throws IOException { } @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCloseException() throws IOException { + public void testCloseException() throws IOException, InterruptedException { IOException ioException = new IOException(); NioChannel channel = mock(NioChannel.class); ChannelContext context = mock(ChannelContext.class); when(channel.getContext()).thenReturn(context); when(context.getSelector()).thenReturn(selector); - selector.queueChannelClose(channel); + executeOnNewThread(() -> selector.queueChannelClose(channel)); doThrow(ioException).when(eventHandler).handleClose(context); @@ -198,9 +200,10 @@ public void testIOExceptionWhileSelect() throws IOException { verify(eventHandler).selectorException(ioException); } - public void testSelectorClosedIfOpenAndEventLoopNotRunning() throws IOException { + public void testSelectorClosedIfOpenAndEventLoopNotRunning() throws Exception { when(rawSelector.isOpen()).thenReturn(true); - selector.close(); + executeOnNewThread(() -> selector.close()); + verify(rawSelector).close(); } @@ -222,8 +225,7 @@ public void testClosedServerChannelWillNotBeRegistered() { } public void testRegisterServerChannelFailsDueToException() throws Exception { - selector.scheduleForRegistration(serverChannel); - + executeOnNewThread(() -> selector.scheduleForRegistration(serverChannel)); ClosedChannelException closedChannelException = new ClosedChannelException(); doThrow(closedChannelException).when(eventHandler).handleRegistration(serverChannelContext); @@ -242,16 +244,18 @@ public void testClosedSocketChannelWillNotBeRegistered() throws Exception { verify(eventHandler, times(0)).handleConnect(channelContext); } - public void testRegisterSocketChannelFailsDueToException() throws Exception { - selector.scheduleForRegistration(channel); + public void testRegisterSocketChannelFailsDueToException() throws InterruptedException { + executeOnNewThread(() -> { + selector.scheduleForRegistration(channel); - ClosedChannelException closedChannelException = new ClosedChannelException(); - doThrow(closedChannelException).when(eventHandler).handleRegistration(channelContext); + ClosedChannelException closedChannelException = new ClosedChannelException(); + doThrow(closedChannelException).when(eventHandler).handleRegistration(channelContext); - selector.preSelect(); + selector.preSelect(); - verify(eventHandler).registrationException(channelContext, closedChannelException); - verify(eventHandler, times(0)).handleConnect(channelContext); + verify(eventHandler).registrationException(channelContext, closedChannelException); + verify(eventHandler, 
times(0)).handleConnect(channelContext); + }); } public void testAcceptEvent() throws IOException { @@ -292,17 +296,17 @@ public void testSuccessfullyRegisterChannelWillAttemptConnect() throws Exception } public void testQueueWriteWhenNotRunning() throws Exception { - selector.close(); - - selector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener)); - + executeOnNewThread(() -> { + selector.close(); + selector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener)); + }); verify(listener).accept(isNull(Void.class), any(ClosedSelectorException.class)); } public void testQueueWriteChannelIsClosed() throws Exception { WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener); - selector.queueWrite(writeOperation); + executeOnNewThread(() -> selector.queueWrite(writeOperation)); when(channelContext.isOpen()).thenReturn(false); selector.preSelect(); @@ -315,7 +319,7 @@ public void testQueueWriteSelectionKeyThrowsException() throws Exception { WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener); CancelledKeyException cancelledKeyException = new CancelledKeyException(); - selector.queueWrite(writeOperation); + executeOnNewThread(() -> selector.queueWrite(writeOperation)); when(channelContext.getSelectionKey()).thenReturn(selectionKey); when(selectionKey.interestOps(anyInt())).thenThrow(cancelledKeyException); @@ -327,7 +331,7 @@ public void testQueueWriteSelectionKeyThrowsException() throws Exception { public void testQueueWriteSuccessful() throws Exception { WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener); - selector.queueWrite(writeOperation); + executeOnNewThread(() -> selector.queueWrite(writeOperation)); assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0); @@ -343,7 +347,7 @@ public void testQueueDirectlyInChannelBufferSuccessful() throws Exception { assertEquals(0, (selectionKey.interestOps() & SelectionKey.OP_WRITE)); when(channelContext.readyForFlush()).thenReturn(true); - selector.writeToChannel(writeOperation); + selector.queueWrite(writeOperation); verify(channelContext).queueWriteOperation(writeOperation); verify(eventHandler, times(0)).handleWrite(channelContext); @@ -357,7 +361,7 @@ public void testShouldFlushIfNoPendingFlushes() throws Exception { assertEquals(0, (selectionKey.interestOps() & SelectionKey.OP_WRITE)); when(channelContext.readyForFlush()).thenReturn(false); - selector.writeToChannel(writeOperation); + selector.queueWrite(writeOperation); verify(channelContext).queueWriteOperation(writeOperation); verify(eventHandler).handleWrite(channelContext); @@ -374,7 +378,7 @@ public void testQueueDirectlyInChannelBufferSelectionKeyThrowsException() throws when(channelContext.getSelectionKey()).thenReturn(selectionKey); when(channelContext.readyForFlush()).thenReturn(false); when(selectionKey.interestOps(anyInt())).thenThrow(cancelledKeyException); - selector.writeToChannel(writeOperation); + selector.queueWrite(writeOperation); verify(channelContext, times(0)).queueWriteOperation(writeOperation); verify(eventHandler, times(0)).handleWrite(channelContext); @@ -477,14 +481,17 @@ public void testWillCallPostHandleAfterChannelHandling() throws Exception { public void testCleanup() throws Exception { NioSocketChannel unregisteredChannel = mock(NioSocketChannel.class); SocketChannelContext unregisteredContext = mock(SocketChannelContext.class); + when(unregisteredContext.getSelector()).thenReturn(selector); 
when(unregisteredChannel.getContext()).thenReturn(unregisteredContext); - selector.scheduleForRegistration(channel); + executeOnNewThread(() -> selector.scheduleForRegistration(channel)); selector.preSelect(); - selector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener)); - selector.scheduleForRegistration(unregisteredChannel); + executeOnNewThread(() -> { + selector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener)); + selector.scheduleForRegistration(unregisteredChannel); + }); TestSelectionKey testSelectionKey = new TestSelectionKey(0); testSelectionKey.attach(channelContext); @@ -496,4 +503,20 @@ public void testCleanup() throws Exception { verify(eventHandler).handleClose(channelContext); verify(eventHandler).handleClose(unregisteredContext); } + + private static void executeOnNewThread(CheckedRunnable runnable) throws InterruptedException { + final Thread thread = new Thread(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + runnable.run(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }); + thread.start(); + thread.join(); + } } diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java index c0c203f728fda..5563ccc43063b 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java @@ -175,7 +175,7 @@ public void testSendMessageFromSameThreadIsQueuedInChannel() { when(readWriteHandler.createWriteOperation(context, buffers, listener)).thenReturn(writeOperation); context.sendMessage(buffers, listener); - verify(selector).writeToChannel(writeOpCaptor.capture()); + verify(selector).queueWrite(writeOpCaptor.capture()); WriteOperation writeOp = writeOpCaptor.getValue(); assertSame(writeOperation, writeOp); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java index 0ea50e844b618..8947447ef58d6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java @@ -180,12 +180,7 @@ public boolean selectorShouldClose() { public void closeChannel() { if (isClosing.compareAndSet(false, true)) { WriteOperation writeOperation = new CloseNotifyOperation(this); - NioSelector selector = getSelector(); - if (selector.isOnCurrentThread() == false) { - selector.queueWrite(writeOperation); - return; - } - selector.writeToChannel(writeOperation); + getSelector().queueWrite(writeOperation); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java index ad91f05868226..7efff1c0e26b9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java @@ -311,7 +311,7 @@ public void testCloseTimeout() { context.closeChannel(); ArgumentCaptor captor = ArgumentCaptor.forClass(WriteOperation.class); 
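As an aside on the SSLChannelContext.closeChannel simplification above: it relies on queueWrite now performing the thread check itself. A self-contained sketch of the overall pattern this patch applies, where EventLoopSketch, submit and runLoopOnce are illustrative stand-ins rather than the real NioSelector API:

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.atomic.AtomicBoolean;

    final class EventLoopSketch {
        private final Queue<Runnable> pending = new ConcurrentLinkedQueue<>();
        private final AtomicBoolean wokenUp = new AtomicBoolean(false);
        private volatile Thread loopThread;

        void submit(Runnable task) {
            if (Thread.currentThread() == loopThread) {
                task.run();          // already on the loop thread: run inline
            } else {
                pending.offer(task); // hand off to the loop thread
                wakeup();
            }
        }

        private void wakeup() {
            // Collapse bursts of wakeups into a single wakeup of the selector.
            if (wokenUp.compareAndSet(false, true)) {
                // the real implementation calls selector.wakeup() here
            }
        }

        void runLoopOnce() {
            loopThread = Thread.currentThread();
            // Pairs with the compareAndSet above, mirroring the
            // wokenUp.getAndSet(false) added to singleLoop() earlier:
            // a pending wakeup forces a non-blocking selectNow(), so no
            // wakeup is lost even though at most one wakeup call is issued.
            wokenUp.getAndSet(false);
            Runnable task;
            while ((task = pending.poll()) != null) {
                task.run();
            }
        }
    }
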
-        verify(selector).writeToChannel(captor.capture());
+        verify(selector).queueWrite(captor.capture());

         ArgumentCaptor taskCaptor = ArgumentCaptor.forClass(Runnable.class);
         Runnable cancellable = mock(Runnable.class);
@@ -333,7 +333,7 @@ public void testCloseTimeoutIsCancelledOnClose() throws IOException {
         context = new SSLChannelContext(channel, selector, exceptionHandler, sslDriver, readWriteHandler, channelBuffer);
         context.closeChannel();
         ArgumentCaptor captor = ArgumentCaptor.forClass(WriteOperation.class);
-        verify(selector).writeToChannel(captor.capture());
+        verify(selector).queueWrite(captor.capture());
         ArgumentCaptor taskCaptor = ArgumentCaptor.forClass(Runnable.class);
         Runnable cancellable = mock(Runnable.class);
         when(nioTimer.scheduleAtRelativeTime(taskCaptor.capture(), anyLong())).thenReturn(cancellable);
@@ -360,7 +360,7 @@ public void testInitiateCloseFromSameThreadSchedulesCloseNotify() throws SSLExce
         context.closeChannel();

         ArgumentCaptor captor = ArgumentCaptor.forClass(WriteOperation.class);
-        verify(selector).writeToChannel(captor.capture());
+        verify(selector).queueWrite(captor.capture());
         context.queueWriteOperation(captor.getValue());
         verify(sslDriver).initiateClose();

From 3dc0c35bf21b3056e1330a921a9475dfd74f0274 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 26 Jun 2019 16:35:14 +0100
Subject: [PATCH 020/140] [ML][Data Frame] Size the GET stats search by the number of IDs requested (#43206)

Set the size of the search request to the number of IDs, limited to 10,000.

---
 .../dataframe/persistence/DataFrameTransformsConfigManager.java | 1 +
 .../persistence/DataFrameTransformsConfigManagerTests.java | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
index bce929e23a7b2..5d3a664054a4b 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
@@ -341,6 +341,7 @@ public void getTransformStats(Collection transformIds, ActionListener
expectedStats = new ArrayList<>(); for (int i=0; i Date: Wed, 26 Jun 2019 17:40:26 +0200 Subject: [PATCH 021/140] Avoid AssertionError when closing engine (#43638)

Lucene throwing an AlreadyClosedException when closing the engine is fine, and should not trigger an AssertionError.

Closes #43626
---
 .../index/engine/InternalEngine.java | 4 +--
 .../discovery/DiskDisruptionIT.java | 1 -
 .../index/engine/InternalEngineTests.java | 31 +++++++++++++++++++
 3 files changed, 33 insertions(+), 3 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 97d8d154ab099..d96220983c971 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -1408,7 +1408,7 @@ private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws
             return new DeleteResult(
                 plan.versionOfDeletion, getPrimaryTerm(), delete.seqNo(), plan.currentlyDeleted == false);
         } catch (Exception ex) {
-            if (indexWriter.getTragicException() == null) {
+            if (ex instanceof AlreadyClosedException == false && indexWriter.getTragicException() == null) {
                 throw new AssertionError("delete operation should never fail at document level", ex);
             }
             throw ex;
@@ -1511,7 +1511,7 @@ private NoOpResult innerNoOp(final NoOp noOp) throws IOException {
                     doc.add(softDeletesField);
                     indexWriter.addDocument(doc);
                 } catch (Exception ex) {
-                    if (indexWriter.getTragicException() == null) {
+                    if (ex instanceof AlreadyClosedException == false && indexWriter.getTragicException() == null) {
                         throw new AssertionError("noop operation should never fail at document level", ex);
                     }
                     throw ex;
diff --git a/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java
index 4ac9fb5d32e48..dac48d89e78b3 100644
--- a/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java
+++ b/server/src/test/java/org/elasticsearch/discovery/DiskDisruptionIT.java
@@ -98,7 +98,6 @@ public FileChannel newFileChannel(Path path, Set options,
      * It simulates a full power outage by preventing translog checkpoint files from being written and restarting the cluster. This means that
      * all un-fsynced data will be lost.
      */
-    @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/43626")
     public void testGlobalCheckpointIsSafe() throws Exception {
         startCluster(rarely() ?
5 : 3);

diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 59bbee9f1bbf5..38ade1ec746cd 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -5595,6 +5595,37 @@ public void testAcquireSearcherOnClosingEngine() throws Exception {
         expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test"));
     }

+    public void testNoOpOnClosingEngine() throws Exception {
+        engine.close();
+        Settings settings = Settings.builder()
+            .put(defaultSettings.getSettings())
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
+        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
+            IndexMetaData.builder(defaultSettings.getIndexMetaData()).settings(settings).build());
+        assertTrue(indexSettings.isSoftDeleteEnabled());
+        try (Store store = createStore();
+             InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
+            engine.close();
+            expectThrows(AlreadyClosedException.class, () -> engine.noOp(
+                new Engine.NoOp(2, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), "reason")));
+        }
+    }
+
+    public void testSoftDeleteOnClosingEngine() throws Exception {
+        engine.close();
+        Settings settings = Settings.builder()
+            .put(defaultSettings.getSettings())
+            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
+        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
+            IndexMetaData.builder(defaultSettings.getIndexMetaData()).settings(settings).build());
+        assertTrue(indexSettings.isSoftDeleteEnabled());
+        try (Store store = createStore();
+             InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
+            engine.close();
+            expectThrows(AlreadyClosedException.class, () -> engine.delete(replicaDeleteForDoc("test", 42, 7, System.nanoTime())));
+        }
+    }
+
     public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception {
         engine.close();
         Set liveDocIds = new HashSet<>();

From 813fa974e025d2de29ac046679b803f7deb7e92f Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Wed, 26 Jun 2019 17:49:15 +0200
Subject: [PATCH 022/140] Remove blank file

Relates to #43410
---
 A | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 A

diff --git a/A b/A
deleted file mode 100644
index e69de29bb2d1d..0000000000000

From 516b4b9e5f5677d0468b8924e247613961731373 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Wed, 26 Jun 2019 08:58:35 -0700
Subject: [PATCH 023/140] Remove non task-aware version of master and node operations (#43563)

TransportNodesAction and TransportMasterNodeAction contain two variations of their abstract operation methods: one taking a Task and one that does not. This commit removes the variant that does not take a Task, eliminating the need to override that method and throw an unsupported operation exception in cases where the task is required.
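For illustration, here is an editorial sketch, inferred from the diffstat below rather than taken from the original commit, of what the consolidation means for a subclass; Request, Response, and the method bodies are placeholders, and the usual org.elasticsearch imports are assumed:

    // Before: an action whose operation needed the task still had to
    // override the task-free variant, typically just to throw:
    @Override
    protected void masterOperation(Request request, ClusterState state,
                                   ActionListener<Response> listener) throws Exception {
        throw new UnsupportedOperationException("the task parameter is required");
    }

    // After: subclasses implement only the task-aware variant,
    // so the task is always available (e.g. for cancellation checks):
    @Override
    protected void masterOperation(Task task, Request request, ClusterState state,
                                   ActionListener<Response> listener) throws Exception {
        // perform the master-node operation here
    }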
--- ...ansportClusterAllocationExplainAction.java | 3 +- ...nsportAddVotingConfigExclusionsAction.java | 3 +- ...portClearVotingConfigExclusionsAction.java | 3 +- .../health/TransportClusterHealthAction.java | 7 ---- .../TransportNodesHotThreadsAction.java | 3 +- .../node/info/TransportNodesInfoAction.java | 3 +- ...nsportNodesReloadSecureSettingsAction.java | 3 +- .../node/stats/TransportNodesStatsAction.java | 3 +- .../node/usage/TransportNodesUsageAction.java | 3 +- .../TransportDeleteRepositoryAction.java | 3 +- .../get/TransportGetRepositoriesAction.java | 3 +- .../put/TransportPutRepositoryAction.java | 3 +- .../TransportVerifyRepositoryAction.java | 3 +- .../TransportClusterRerouteAction.java | 3 +- .../TransportClusterUpdateSettingsAction.java | 3 +- .../TransportClusterSearchShardsAction.java | 3 +- .../create/TransportCreateSnapshotAction.java | 5 +-- .../delete/TransportDeleteSnapshotAction.java | 3 +- .../get/TransportGetSnapshotsAction.java | 8 +++-- .../TransportRestoreSnapshotAction.java | 3 +- .../status/TransportNodesSnapshotsStatus.java | 3 +- .../TransportSnapshotsStatusAction.java | 3 +- .../state/TransportClusterStateAction.java | 3 +- .../stats/TransportClusterStatsAction.java | 3 +- .../TransportDeleteStoredScriptAction.java | 3 +- .../TransportGetStoredScriptAction.java | 3 +- .../TransportPutStoredScriptAction.java | 3 +- .../TransportPendingClusterTasksAction.java | 3 +- .../alias/TransportIndicesAliasesAction.java | 3 +- .../alias/get/TransportGetAliasesAction.java | 3 +- .../close/TransportCloseIndexAction.java | 7 ---- .../create/TransportCreateIndexAction.java | 3 +- .../delete/TransportDeleteIndexAction.java | 2 +- .../put/TransportPutMappingAction.java | 3 +- .../open/TransportOpenIndexAction.java | 2 +- .../rollover/TransportRolloverAction.java | 9 +++-- .../get/TransportGetSettingsAction.java | 4 ++- .../put/TransportUpdateSettingsAction.java | 3 +- .../TransportIndicesShardStoresAction.java | 3 +- .../indices/shrink/TransportResizeAction.java | 11 ++++-- .../TransportDeleteIndexTemplateAction.java | 3 +- .../get/TransportGetIndexTemplatesAction.java | 3 +- .../put/TransportPutIndexTemplateAction.java | 3 +- .../post/TransportUpgradeSettingsAction.java | 3 +- .../ingest/DeletePipelineTransportAction.java | 5 +-- .../ingest/GetPipelineTransportAction.java | 3 +- .../ingest/PutPipelineTransportAction.java | 3 +- .../master/TransportMasterNodeAction.java | 10 ++---- .../info/TransportClusterInfoAction.java | 4 ++- .../support/nodes/TransportNodesAction.java | 6 +--- .../TransportNodesListGatewayMetaState.java | 3 +- ...ransportNodesListGatewayStartedShards.java | 3 +- .../TransportNodesListShardStoreMetaData.java | 3 +- .../CompletionPersistentTaskAction.java | 3 +- .../RemovePersistentTaskAction.java | 3 +- .../persistent/StartPersistentTaskAction.java | 3 +- .../UpdatePersistentTaskStatusAction.java | 3 +- .../snapshots/SnapshotShardsService.java | 3 +- .../node/tasks/CancellableTasksTests.java | 5 --- .../node/tasks/TaskManagerTestCase.java | 3 +- .../cluster/node/tasks/TestTaskPlugin.java | 6 ---- .../node/tasks/TransportTasksActionTests.java | 4 +-- .../TransportRolloverActionTests.java | 36 +++++++++---------- .../settings/get/GetSettingsActionTests.java | 6 ++-- .../TransportMasterNodeActionTests.java | 5 +-- .../TransportMasterNodeActionUtils.java | 8 +++-- .../nodes/TransportNodesActionTests.java | 3 +- .../InternalOrPrivateSettingsPlugin.java | 7 ++-- .../ccr/action/TransportCcrStatsAction.java | 7 ++-- ...ransportDeleteAutoFollowPatternAction.java 
| 3 +- .../ccr/action/TransportFollowInfoAction.java | 3 +- .../TransportGetAutoFollowPatternAction.java | 3 +- .../action/TransportPauseFollowAction.java | 3 +- .../TransportPutAutoFollowPatternAction.java | 3 +- .../ccr/action/TransportPutFollowAction.java | 7 ++-- .../action/TransportResumeFollowAction.java | 3 +- .../ccr/action/TransportUnfollowAction.java | 7 ++-- .../ccr/CCRInfoTransportActionTests.java | 2 +- .../license/TransportDeleteLicenseAction.java | 5 +-- .../TransportGetBasicStatusAction.java | 3 +- .../license/TransportGetLicenseAction.java | 3 +- .../TransportGetTrialStatusAction.java | 3 +- .../TransportPostStartBasicAction.java | 3 +- .../TransportPostStartTrialAction.java | 3 +- .../license/TransportPutLicenseAction.java | 5 +-- .../xpack/ccr/CCRUsageTransportAction.java | 4 ++- .../action/TransportFreezeIndexAction.java | 5 --- .../action/TransportXPackUsageAction.java | 3 +- .../DataFrameUsageTransportAction.java | 4 ++- ...ansportDeleteDataFrameTransformAction.java | 4 ++- .../TransportPutDataFrameTransformAction.java | 3 +- ...ransportStartDataFrameTransformAction.java | 3 +- .../DataFrameInfoTransportActionTests.java | 2 +- .../TransportDeprecationInfoAction.java | 3 +- .../TransportNodeDeprecationCheckAction.java | 3 +- .../graph/GraphUsageTransportAction.java | 4 ++- .../graph/GraphInfoTransportActionTests.java | 4 +-- .../IndexLifecycleUsageTransportAction.java | 4 ++- .../TransportDeleteLifecycleAction.java | 3 +- .../action/TransportGetLifecycleAction.java | 3 +- .../action/TransportGetStatusAction.java | 3 +- .../action/TransportMoveToStepAction.java | 3 +- .../action/TransportPutLifecycleAction.java | 3 +- ...sportRemoveIndexLifecyclePolicyAction.java | 3 +- .../action/TransportRetryAction.java | 3 +- .../action/TransportStartILMAction.java | 3 +- .../action/TransportStopILMAction.java | 3 +- ...ndexLifecycleInfoTransportActionTests.java | 2 +- .../LogstashUsageTransportAction.java | 4 ++- .../LogstashInfoTransportActionTests.java | 4 +-- .../MachineLearningUsageTransportAction.java | 4 ++- ...ansportDeleteDataFrameAnalyticsAction.java | 4 ++- .../action/TransportDeleteDatafeedAction.java | 3 +- .../ml/action/TransportDeleteJobAction.java | 5 --- .../TransportFinalizeJobExecutionAction.java | 3 +- .../action/TransportGetDatafeedsAction.java | 3 +- .../TransportGetDatafeedsStatsAction.java | 3 +- .../ml/action/TransportGetJobsAction.java | 3 +- .../ml/action/TransportOpenJobAction.java | 4 ++- .../ml/action/TransportPutDatafeedAction.java | 2 +- .../ml/action/TransportPutJobAction.java | 2 +- .../TransportRevertModelSnapshotAction.java | 3 +- .../action/TransportSetUpgradeModeAction.java | 5 +-- ...ransportStartDataFrameAnalyticsAction.java | 3 +- .../action/TransportStartDatafeedAction.java | 3 +- .../action/TransportUpdateDatafeedAction.java | 3 +- .../ml/action/TransportUpdateJobAction.java | 4 ++- ...chineLearningInfoTransportActionTests.java | 12 +++---- ...nsportFinalizeJobExecutionActionTests.java | 2 +- .../MonitoringUsageTransportAction.java | 4 ++- .../MonitoringInfoTransportActionTests.java | 2 +- .../rollup/RollupUsageTransportAction.java | 4 ++- .../action/TransportPutRollupJobAction.java | 4 ++- .../RollupInfoTransportActionTests.java | 2 +- .../SecurityUsageTransportAction.java | 4 ++- .../realm/TransportClearRealmCacheAction.java | 3 +- .../role/TransportClearRolesCacheAction.java | 3 +- .../SecurityInfoTransportActionTests.java | 4 +-- .../xpack/sql/SqlUsageTransportAction.java | 5 ++- .../sql/plugin/TransportSqlStatsAction.java | 3 +- 
.../sql/SqlInfoTransportActionTests.java | 10 ++++-- .../vectors/VectorsUsageTransportAction.java | 4 ++- .../VectorsInfoTransportActionTests.java | 4 +-- .../VotingOnlyNodeFeatureSet.java | 4 ++- .../watcher/WatcherUsageTransportAction.java | 5 ++- .../TransportWatcherServiceAction.java | 3 +- .../stats/TransportWatcherStatsAction.java | 3 +- .../WatcherInfoTransportActionTests.java | 10 ++++-- .../TransportWatcherStatsActionTests.java | 4 +-- 149 files changed, 361 insertions(+), 242 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java index e3ee0dd7b1524..2fe4982acc63e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -88,7 +89,7 @@ protected ClusterAllocationExplainResponse newResponse() { } @Override - protected void masterOperation(final ClusterAllocationExplainRequest request, final ClusterState state, + protected void masterOperation(Task task, final ClusterAllocationExplainRequest request, final ClusterState state, final ActionListener listener) { final RoutingNodes routingNodes = state.getRoutingNodes(); final ClusterInfo clusterInfo = clusterInfoService.getClusterInfo(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java index 512321c7e4432..a6474510e88f3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportService; @@ -78,7 +79,7 @@ protected AddVotingConfigExclusionsResponse read(StreamInput in) throws IOExcept } @Override - protected void masterOperation(AddVotingConfigExclusionsRequest request, ClusterState state, + protected void masterOperation(Task task, AddVotingConfigExclusionsRequest request, ClusterState state, ActionListener listener) throws Exception { resolveVotingConfigExclusionsAndCheckMaximum(request, state); // throws IAE if no nodes matched or maximum exceeded diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java index 7cd19a824be05..3b441e7ea9db7 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportService; @@ -72,7 +73,7 @@ protected ClearVotingConfigExclusionsResponse read(StreamInput in) throws IOExce } @Override - protected void masterOperation(ClearVotingConfigExclusionsRequest request, ClusterState initialState, + protected void masterOperation(Task task, ClearVotingConfigExclusionsRequest request, ClusterState initialState, ActionListener listener) throws Exception { final long startTimeMillis = threadPool.relativeTimeInMillis(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 0caae77d7dead..08b8730b02419 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -77,13 +77,6 @@ protected ClusterHealthResponse newResponse() { return new ClusterHealthResponse(); } - @Override - protected final void masterOperation(ClusterHealthRequest request, ClusterState state, - ActionListener listener) throws Exception { - logger.warn("attempt to execute a cluster health operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void masterOperation(Task task, final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener listener) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 6321813f189fb..178ab6c19ea62 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.monitor.jvm.HotThreads; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -64,7 +65,7 @@ protected NodeHotThreads newNodeResponse() { } @Override - protected NodeHotThreads nodeOperation(NodeRequest request) { + protected NodeHotThreads nodeOperation(NodeRequest request, Task task) { HotThreads hotThreads = new HotThreads() .busiestThreads(request.request.threads) .type(request.request.type) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index a1f9790af9351..078d428c34e11 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -66,7 +67,7 @@ protected NodeInfo newNodeResponse() { } @Override - protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) { + protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest, Task task) { NodesInfoRequest request = nodeRequest.request; return nodeService.info(request.settings(), request.os(), request.process(), request.jvm(), request.threadPool(), request.transport(), request.http(), request.plugins(), request.ingest(), request.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index f2fef743a0d37..44abbfd85917f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -78,7 +79,7 @@ protected NodesReloadSecureSettingsResponse.NodeResponse newNodeResponse() { } @Override - protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeRequest nodeReloadRequest) { + protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeRequest nodeReloadRequest, Task task) { try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { // reread keystore from config file if (keystore == null) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 1028da916a2c1..86c3f7b983471 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -65,7 +66,7 @@ protected NodeStats newNodeResponse() { } @Override - protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) { + protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest, Task task) { NodesStatsRequest request = nodeStatsRequest.request; return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(), request.fs(), request.transport(), request.http(), request.breaker(), 
request.script(), request.discovery(), diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java index c34ca9b614432..0a6204d28064a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.usage.UsageService; @@ -63,7 +64,7 @@ protected NodeUsage newNodeResponse() { } @Override - protected NodeUsage nodeOperation(NodeUsageRequest nodeUsageRequest) { + protected NodeUsage nodeOperation(NodeUsageRequest nodeUsageRequest, Task task) { NodesUsageRequest request = nodeUsageRequest.request; return usageService.getUsageStats(clusterService.localNode(), request.restActions()); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java index bc3c5c329ef1e..5a93c9dd6f2e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -73,7 +74,7 @@ protected ClusterBlockException checkBlock(DeleteRepositoryRequest request, Clus } @Override - protected void masterOperation(final DeleteRepositoryRequest request, ClusterState state, + protected void masterOperation(Task task, final DeleteRepositoryRequest request, ClusterState state, final ActionListener listener) { repositoriesService.unregisterRepository( request, ActionListener.delegateFailure(listener, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java index c626a17a13ead..3f0ee69d2a5aa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -78,7 +79,7 @@ protected ClusterBlockException checkBlock(GetRepositoriesRequest request, Clust } @Override - protected void masterOperation(final GetRepositoriesRequest 
request, ClusterState state, + protected void masterOperation(Task task, final GetRepositoriesRequest request, ClusterState state, final ActionListener listener) { MetaData metaData = state.metaData(); RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index 9c96f3a4ab660..21f4c7e6f384d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -73,7 +74,7 @@ protected ClusterBlockException checkBlock(PutRepositoryRequest request, Cluster } @Override - protected void masterOperation(final PutRepositoryRequest request, ClusterState state, + protected void masterOperation(Task task, final PutRepositoryRequest request, ClusterState state, final ActionListener listener) { repositoriesService.registerRepository(request, ActionListener.delegateFailure(listener, (delegatedListener, response) -> delegatedListener.onResponse(new AcknowledgedResponse(response.isAcknowledged())))); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java index d4ec1d3a8bcb4..3de71e346f3c4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -66,7 +67,7 @@ protected ClusterBlockException checkBlock(VerifyRepositoryRequest request, Clus } @Override - protected void masterOperation(final VerifyRepositoryRequest request, ClusterState state, + protected void masterOperation(Task task, final VerifyRepositoryRequest request, ClusterState state, final ActionListener listener) { repositoriesService.verifyRepository(request.name(), ActionListener.delegateFailure(listener, (delegatedListener, verifyResponse) -> diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index 0be91f6050813..eea3dafcb1eba 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -91,7 +92,7 @@ protected ClusterRerouteResponse read(StreamInput in) throws IOException { } @Override - protected void masterOperation(final ClusterRerouteRequest request, final ClusterState state, + protected void masterOperation(Task task, final ClusterRerouteRequest request, final ClusterState state, final ActionListener listener) { Map> stalePrimaryAllocations = new HashMap<>(); for (AllocationCommand command : request.getCommands().commands()) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 0e520217e77c2..ec55a21bac4bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -93,7 +94,7 @@ protected ClusterUpdateSettingsResponse read(StreamInput in) throws IOException } @Override - protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, + protected void masterOperation(Task task, final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener listener) { final SettingsUpdater updater = new SettingsUpdater(clusterSettings); clusterService.submitStateUpdateTask("cluster_update_settings", diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 39006cd1e8407..23b9f34a241c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -82,7 +83,7 @@ protected ClusterSearchShardsResponse read(StreamInput in) throws IOException { } @Override - protected void masterOperation(final ClusterSearchShardsRequest request, final ClusterState state, + protected void masterOperation(Task task, final ClusterSearchShardsRequest request, final ClusterState state, final ActionListener listener) { ClusterState clusterState = clusterService.state(); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 73f9a0742a719..a3de0d4f0a975 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -70,8 +71,8 @@ protected ClusterBlockException checkBlock(CreateSnapshotRequest request, Cluste } @Override - protected void masterOperation(final CreateSnapshotRequest request, ClusterState state, - final ActionListener listener) { + protected void masterOperation(Task task, final CreateSnapshotRequest request, ClusterState state, + final ActionListener listener) { if (request.waitForCompletion()) { snapshotsService.executeSnapshot(request, ActionListener.map(listener, CreateSnapshotResponse::new)); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index 0664a0fa75d11..3f980225f434c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -73,7 +74,7 @@ protected ClusterBlockException checkBlock(DeleteSnapshotRequest request, Cluste } @Override - protected void masterOperation(final DeleteSnapshotRequest request, ClusterState state, + protected void masterOperation(Task task, final DeleteSnapshotRequest request, ClusterState state, final ActionListener listener) { snapshotsService.deleteSnapshot(request.repository(), request.snapshot(), ActionListener.map(listener, v -> new AcknowledgedResponse(true)), false); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 4d2dcf92d70ec..96fe976ea0fbb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -45,7 +45,9 @@ import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -94,11 +96,11 @@ protected ClusterBlockException checkBlock(GetSnapshotsRequest request, ClusterS } 
@Override - protected void masterOperation(final GetSnapshotsRequest request, final ClusterState state, + protected void masterOperation(Task task, final GetSnapshotsRequest request, final ClusterState state, final ActionListener listener) { final String[] repositories = request.repositories(); - transportService.sendRequest(transportService.getLocalNode(), GetRepositoriesAction.NAME, - new GetRepositoriesRequest(repositories), + transportService.sendChildRequest(transportService.getLocalNode(), GetRepositoriesAction.NAME, + new GetRepositoriesRequest(repositories), task, TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>( ActionListener.wrap( response -> diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 935902432d90f..5178a5224a058 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -72,7 +73,7 @@ protected ClusterBlockException checkBlock(RestoreSnapshotRequest request, Clust } @Override - protected void masterOperation(final RestoreSnapshotRequest request, final ClusterState state, + protected void masterOperation(Task task, final RestoreSnapshotRequest request, final ClusterState state, final ActionListener listener) { restoreService.restoreSnapshot(request, ActionListener.delegateFailure(listener, (delegatedListener, restoreCompletionResponse) -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index bdb9c2fd92ebf..90eb06b6a2f5f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotShardsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -83,7 +84,7 @@ protected NodesSnapshotStatus newResponse(Request request, List> snapshotMapBuilder = new HashMap<>(); try { final String nodeId = clusterService.localNode().getId(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 8430d1868c88d..234bc51b95b25 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ 
-42,6 +42,7 @@ import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -90,7 +91,7 @@ protected SnapshotsStatusResponse newResponse() { } @Override - protected void masterOperation(final SnapshotsStatusRequest request, + protected void masterOperation(Task task, final SnapshotsStatusRequest request, final ClusterState state, final ActionListener listener) throws Exception { List currentSnapshots = diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index cedca2d77e192..9669abf8e931a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -77,7 +78,7 @@ protected ClusterStateResponse newResponse() { } @Override - protected void masterOperation(final ClusterStateRequest request, final ClusterState state, + protected void masterOperation(Task task, final ClusterStateRequest request, final ClusterState state, final ActionListener listener) throws IOException { final Predicate acceptableClusterStatePredicate diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 4cf81c24fbf1a..aa09ab43419d0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -91,7 +92,7 @@ protected ClusterStatsNodeResponse newNodeResponse() { } @Override - protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) { + protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest, Task task) { NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false); NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, true, true, true, false, true, false, false, false, false, false, false, false); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java index 3e1a043a79855..86fb16c63d4d6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -65,7 +66,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(DeleteStoredScriptRequest request, ClusterState state, + protected void masterOperation(Task task, DeleteStoredScriptRequest request, ClusterState state, ActionListener listener) throws Exception { scriptService.deleteStoredScript(clusterService, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java index 19e86d1b6722d..1e67e025b61b9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -57,7 +58,7 @@ protected GetStoredScriptResponse newResponse() { } @Override - protected void masterOperation(GetStoredScriptRequest request, ClusterState state, + protected void masterOperation(Task task, GetStoredScriptRequest request, ClusterState state, ActionListener listener) throws Exception { listener.onResponse(new GetStoredScriptResponse(request.id(), scriptService.getStoredScript(state, request))); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java index d7594567f8464..ceabd31c45604 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -65,7 +66,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(PutStoredScriptRequest request, ClusterState state, + protected void masterOperation(Task task, PutStoredScriptRequest request, ClusterState state, ActionListener listener) throws Exception { scriptService.putStoredScript(clusterService, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java index cb061a25363cc..d62e7c13247bc 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -65,7 +66,7 @@ protected PendingClusterTasksResponse newResponse() { } @Override - protected void masterOperation(PendingClusterTasksRequest request, ClusterState state, + protected void masterOperation(Task task, PendingClusterTasksRequest request, ClusterState state, ActionListener listener) { logger.trace("fetching pending tasks from cluster service"); final List pendingTasks = clusterService.getMasterService().pendingTasks(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index cf7904c9c3fe2..e4f460d4aa7c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.Index; import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -104,7 +105,7 @@ protected ClusterBlockException checkBlock(IndicesAliasesRequest request, Cluste } @Override - protected void masterOperation(final IndicesAliasesRequest request, final ClusterState state, + protected void masterOperation(Task task, final IndicesAliasesRequest request, final ClusterState state, final ActionListener listener) { //Expand the indices names diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index d1a573e6da8d7..30de49031e101 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -63,7 +64,7 @@ protected GetAliasesResponse newResponse() { } @Override - protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, GetAliasesRequest request, ClusterState state, ActionListener listener) { String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); ImmutableOpenMap> aliases = state.metaData().findAliases(request, concreteIndices); listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases))); diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 71489e1f11b82..d653f000d0bc5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -104,13 +104,6 @@ protected ClusterBlockException checkBlock(CloseIndexRequest request, ClusterSta indexNameExpressionResolver.concreteIndexNames(state, request)); } - @Override - protected void masterOperation(final CloseIndexRequest request, - final ClusterState state, - final ActionListener listener) { - throw new UnsupportedOperationException("The task parameter is required"); - } - @Override protected void masterOperation(final Task task, final CloseIndexRequest request, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index 6cc97f3d80b1a..fdf8cc0c00c25 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -73,7 +74,7 @@ protected ClusterBlockException checkBlock(CreateIndexRequest request, ClusterSt } @Override - protected void masterOperation(final CreateIndexRequest request, final ClusterState state, + protected void masterOperation(Task task, final CreateIndexRequest request, final ClusterState state, final ActionListener listener) { String cause = request.cause(); if (cause.length() == 0) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index c9fd558850ba1..384452ab8c945 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -89,7 +89,7 @@ protected ClusterBlockException checkBlock(DeleteIndexRequest request, ClusterSt } @Override - protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, + protected void masterOperation(Task task, final DeleteIndexRequest request, final ClusterState state, final ActionListener listener) { final Set concreteIndices = new HashSet<>(Arrays.asList(indexNameExpressionResolver.concreteIndices(state, request))); if (concreteIndices.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index aa519bdc3bdb3..f2a21858f5330 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ 
-36,6 +36,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -94,7 +95,7 @@ protected ClusterBlockException checkBlock(PutMappingRequest request, ClusterSta } @Override - protected void masterOperation(final PutMappingRequest request, final ClusterState state, + protected void masterOperation(Task task, final PutMappingRequest request, final ClusterState state, final ActionListener listener) { try { final Index[] concreteIndices = request.getConcreteIndex() == null ? diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 0ab61e922b9b2..c815a6b1132d7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -88,7 +88,7 @@ protected ClusterBlockException checkBlock(OpenIndexRequest request, ClusterStat } @Override - protected void masterOperation(final OpenIndexRequest request, final ClusterState state, + protected void masterOperation(Task task, final OpenIndexRequest request, final ClusterState state, final ActionListener listener) { final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); if (concreteIndices == null || concreteIndices.length == 0) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 4c2dbc7223d25..60f099cb1d034 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; @@ -49,6 +51,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.shard.DocsStats; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -110,7 +113,7 @@ protected ClusterBlockException checkBlock(RolloverRequest request, ClusterState } @Override - protected void masterOperation(final RolloverRequest rolloverRequest, final ClusterState state, + protected void masterOperation(Task task, final RolloverRequest rolloverRequest, final ClusterState state, final ActionListener listener) { final MetaData metaData = state.metaData(); validate(metaData, rolloverRequest); @@ -126,7 +129,9 @@ protected void masterOperation(final RolloverRequest 
rolloverRequest, final Clus final String rolloverIndexName = indexNameExpressionResolver.resolveDateMathExpression(unresolvedName); MetaDataCreateIndexService.validateIndexName(rolloverIndexName, state); // will fail if the index already exists checkNoDuplicatedAliasInIndexTemplate(metaData, rolloverIndexName, rolloverRequest.getAlias()); - client.admin().indices().prepareStats(rolloverRequest.getAlias()).clear().setDocs(true).execute( + IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getAlias()).clear().docs(true); + statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); + client.execute(IndicesStatsAction.INSTANCE, statsRequest, new ActionListener() { @Override public void onResponse(IndicesStatsResponse statsResponse) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index 7f76439793365..fd0004beba292 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.Index; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -79,7 +80,8 @@ private static boolean isFilteredRequest(GetSettingsRequest request) { } @Override - protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, GetSettingsRequest request, ClusterState state, + ActionListener listener) { Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); ImmutableOpenMap.Builder indexToSettingsBuilder = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder indexToDefaultSettingsBuilder = ImmutableOpenMap.builder(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index bc0f96301c086..c3192631e2cf5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.Index; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -87,7 +88,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(final UpdateSettingsRequest request, final ClusterState state, + protected void masterOperation(Task task, final UpdateSettingsRequest request, final ClusterState state, final ActionListener listener) { final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); UpdateSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpdateSettingsClusterStateUpdateRequest() diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 38b11530c2d93..3967e5e31f930 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -46,6 +46,7 @@ import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -87,7 +88,7 @@ protected IndicesShardStoresResponse newResponse() { } @Override - protected void masterOperation(IndicesShardStoresRequest request, ClusterState state, + protected void masterOperation(Task task, IndicesShardStoresRequest request, ClusterState state, ActionListener listener) { final RoutingTable routingTables = state.routingTable(); final RoutingNodes routingNodes = state.getRoutingNodes(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index b1c4d3b831443..178c15e473bd4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -24,6 +24,9 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; @@ -41,6 +44,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -97,13 +101,16 @@ protected ClusterBlockException checkBlock(ResizeRequest request, ClusterState s } @Override - protected void masterOperation(final ResizeRequest resizeRequest, final ClusterState state, + protected void masterOperation(Task task, final ResizeRequest resizeRequest, final ClusterState state, final ActionListener listener) { // there is no need to fetch docs stats for split but we keep it simple and do it anyway for simplicity of the code final String sourceIndex = indexNameExpressionResolver.resolveDateMathExpression(resizeRequest.getSourceIndex()); final String targetIndex = indexNameExpressionResolver.resolveDateMathExpression(resizeRequest.getTargetIndexRequest().index()); - client.admin().indices().prepareStats(sourceIndex).clear().setDocs(true).execute( + IndicesStatsRequestBuilder statsRequestBuilder = 
+        IndicesStatsRequestBuilder statsRequestBuilder = client.admin().indices().prepareStats(sourceIndex).clear().setDocs(true);
+        IndicesStatsRequest statsRequest = statsRequestBuilder.request();
+        statsRequest.setParentTask(clusterService.localNode().getId(), task.getId());
+        client.execute(IndicesStatsAction.INSTANCE, statsRequest,
             ActionListener.delegateFailure(listener, (delegatedListener, indicesStatsResponse) -> {
                 CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(resizeRequest, state, i -> {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
index a296988019f88..c59aac00f109d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -75,7 +76,7 @@ protected ClusterBlockException checkBlock(DeleteIndexTemplateRequest request, C
     }
 
     @Override
-    protected void masterOperation(final DeleteIndexTemplateRequest request, final ClusterState state,
+    protected void masterOperation(Task task, final DeleteIndexTemplateRequest request, final ClusterState state,
                                    final ActionListener listener) {
         indexTemplateService.removeTemplates(
             new MetaDataIndexTemplateService
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
index cd4cb6494e92d..1d733667fc3d5 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -63,7 +64,7 @@ protected GetIndexTemplatesResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(GetIndexTemplatesRequest request, ClusterState state,
+    protected void masterOperation(Task task, GetIndexTemplatesRequest request, ClusterState state,
                                    ActionListener listener) {
         List results;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
index 9351e90f9822f..95e6cf9a7d158 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -80,7 +81,7 @@ protected ClusterBlockException checkBlock(PutIndexTemplateRequest request, Clus
     }
 
     @Override
-    protected void masterOperation(final PutIndexTemplateRequest request, final ClusterState state,
+    protected void masterOperation(Task task, final PutIndexTemplateRequest request, final ClusterState state,
                                    final ActionListener listener) {
         String cause = request.cause();
         if (cause.length() == 0) {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
index a31c6425fb7ea..a747b6f058bc6 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
@@ -33,6 +33,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -73,7 +74,7 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(final UpgradeSettingsRequest request, final ClusterState state,
+    protected void masterOperation(Task task, final UpgradeSettingsRequest request, final ClusterState state,
                                    final ActionListener listener) {
         UpgradeSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpgradeSettingsClusterStateUpdateRequest()
             .ackTimeout(request.timeout())
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java
index 647bf35a01acd..8f69d20968014 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.ingest.IngestService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -63,8 +64,8 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(DeletePipelineRequest request, ClusterState state,
-                                   ActionListener listener) throws Exception {
+    protected void masterOperation(Task task, DeletePipelineRequest request, ClusterState state,
+                                   ActionListener listener) throws Exception {
         ingestService.delete(request, listener);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java
index e24e0a833b4c9..4501629284e63 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.ingest.IngestService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -53,7 +54,7 @@ protected GetPipelineResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(GetPipelineRequest request, ClusterState state, ActionListener listener)
+    protected void masterOperation(Task task, GetPipelineRequest request, ClusterState state, ActionListener listener)
             throws Exception {
         listener.onResponse(new GetPipelineResponse(IngestService.getPipelines(state, request.getIds())));
     }
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
index ef9158b93468a..95a9dbc7254b3 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.ingest.IngestInfo;
 import org.elasticsearch.ingest.IngestService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -75,7 +76,7 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener listener)
+    protected void masterOperation(Task task, PutPipelineRequest request, ClusterState state, ActionListener listener)
             throws Exception {
         NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
         nodesInfoRequest.clear();
diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
index 595b72f8da803..03c7346df5aab 100644
--- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
@@ -118,14 +118,8 @@ protected Response read(StreamInput in) throws IOException {
         return response;
     }
 
-    protected abstract void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception;
-
-    /**
-     * Override this operation if access to the task parameter is needed
-     */
-    protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception {
-        masterOperation(request, state, listener);
-    }
+    protected abstract void masterOperation(Task task, Request request, ClusterState state,
+                                            ActionListener listener) throws Exception;
 
     protected boolean localExecute(Request request) {
         return false;
diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java b/server/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
index dc46bfc6d1d31..0c3c70c9d02dd 100644
--- a/server/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
@@ -26,6 +26,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -46,7 +47,8 @@ protected String executor() {
     }
 
     @Override
-    protected final void masterOperation(final Request request, final ClusterState state, final ActionListener listener) {
+    protected final void masterOperation(Task task, final Request request, final ClusterState state,
+                                         final ActionListener listener) {
         String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
         doMasterOperation(request, concreteIndices, state, listener);
     }
diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
index bc4fb5d38c328..e359d83716e15 100644
--- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
@@ -123,11 +123,7 @@ protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray n
 
     protected abstract NodeResponse newNodeResponse();
 
-    protected abstract NodeResponse nodeOperation(NodeRequest request);
-
-    protected NodeResponse nodeOperation(NodeRequest request, Task task) {
-        return nodeOperation(request);
-    }
+    protected abstract NodeResponse nodeOperation(NodeRequest request, Task task);
 
     /**
      * resolve node ids to concrete nodes of the incoming request
diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
index ab0fad88ecdfa..715f117e170fa 100644
--- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
+++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
@@ -37,6 +37,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -82,7 +83,7 @@ protected NodesGatewayMetaState newResponse(Request request, List
 listener) {
         persistentTasksClusterService.completePersistentTask(request.taskId, request.allocationId, request.exception,
             ActionListener.delegateFailure(listener,
diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
index 877033fe4f33a..8f122dab0c514 100644
--- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -146,7 +147,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state)
     }
 
     @Override
-    protected final void masterOperation(final Request request, ClusterState state,
+    protected final void masterOperation(Task ignoredTask, final Request request, ClusterState state,
                                          final ActionListener listener) {
         persistentTasksClusterService.removePersistentTask(request.taskId,
             ActionListener.delegateFailure(listener,
diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
index 977239ee2b990..1b0c973a3ce40 100644
--- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -213,7 +214,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state)
     }
 
     @Override
-    protected final void masterOperation(final Request request, ClusterState state,
+    protected final void masterOperation(Task ignoredTask, final Request request, ClusterState state,
                                          final ActionListener listener) {
         persistentTasksClusterService.createPersistentTask(request.taskId, request.taskName, request.params,
             ActionListener.delegateFailure(listener,
diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
index 218154d37c9b7..22c7f74e3c8e7 100644
--- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -176,7 +177,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state)
     }
 
     @Override
-    protected final void masterOperation(final Request request,
+    protected final void masterOperation(Task ignoredTask, final Request request,
                                          final ClusterState state,
                                          final ActionListener listener) {
         persistentTasksClusterService.updatePersistentTaskState(request.taskId, request.allocationId, request.state,
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
index 65e1191211ec2..40ddc9a320056 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
@@ -66,6 +66,7 @@
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.Repository;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportException;
 import org.elasticsearch.transport.TransportRequestDeduplicator;
@@ -634,7 +635,7 @@ protected UpdateIndexShardSnapshotStatusResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(UpdateIndexShardSnapshotStatusRequest request, ClusterState state,
+    protected void masterOperation(Task task, UpdateIndexShardSnapshotStatusRequest request, ClusterState state,
                                    ActionListener listener) {
         innerUpdateSnapshotState(request, listener);
     }
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
index c52fc58b90079..7aa1e27fc2188 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
@@ -193,11 +193,6 @@ protected NodeResponse nodeOperation(CancellableNodeRequest request, Task task)
             return new NodeResponse(clusterService.localNode());
         }
-
-        @Override
-        protected NodeResponse nodeOperation(CancellableNodeRequest request) {
-            throw new UnsupportedOperationException("the task parameter is required");
-        }
     }
 
     private Task startCancellableTestNodesAction(boolean waitForActionToStart, int blockedNodesCount, ActionListener
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
index ecca51c7bbb83..bfaac83d51913 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
@@ -43,6 +43,7 @@
 import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.tasks.MockTaskManager;
@@ -156,7 +157,7 @@ protected NodeResponse newNodeResponse() {
         }
 
         @Override
-        protected abstract NodeResponse nodeOperation(NodeRequest request);
+        protected abstract NodeResponse nodeOperation(NodeRequest request, Task task);
     }
 
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index d39ab823dc490..70dc8bfe65d94 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -333,12 +333,6 @@ protected NodeResponse nodeOperation(NodeRequest request, Task task) {
             logger.info("Test task finished on the node {}", clusterService.localNode());
             return new NodeResponse(clusterService.localNode());
         }
-
-        @Override
-        protected NodeResponse nodeOperation(NodeRequest request) {
-            throw new UnsupportedOperationException("the task parameter is required");
-        }
-
     }
 
     public static class TestTaskAction extends Action {
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index b883d593352c2..3d8ab68d5f25a 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -272,7 +272,7 @@ private Task startBlockingTestNodesAction(CountDownLatch checkLatch, NodesReques
             actions[i] = new TestNodesAction("internal:testAction", threadPool, testNodes[i].clusterService,
                     testNodes[i].transportService) {
                 @Override
-                protected NodeResponse nodeOperation(NodeRequest request) {
+                protected NodeResponse nodeOperation(NodeRequest request, Task task) {
                     logger.info("Action on node {}", node);
                     actionLatch.countDown();
                     try {
@@ -527,7 +527,7 @@ public void testFailedTasksCount() throws ExecutionException, InterruptedExcepti
             actions[i] = new TestNodesAction("internal:testAction", threadPool, testNodes[i].clusterService,
                     testNodes[i].transportService) {
                 @Override
-                protected NodeResponse nodeOperation(NodeRequest request) {
+                protected NodeResponse nodeOperation(NodeRequest request, Task task) {
                     logger.info("Action on node {}", node);
                     throw new RuntimeException("Test exception");
                 }
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java
index c24e0c65e00ce..9dac4a38a36b6 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java
@@ -21,20 +21,19 @@
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
 import org.elasticsearch.action.admin.indices.stats.CommonStats;
 import org.elasticsearch.action.admin.indices.stats.IndexStats;
-import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
+import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsTests;
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.client.AdminClient;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.client.IndicesAdminClient;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.AliasAction;
@@ -45,6 +44,7 @@
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
 import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService;
+import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.RecoverySource;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
@@ -71,6 +71,7 @@
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.index.warmer.WarmerStats;
 import org.elasticsearch.search.suggest.completion.CompletionStats;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -92,6 +93,7 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
@@ -369,6 +371,9 @@ public void testRejectDuplicateAlias() {
     public void testConditionEvaluationWhenAliasToWriteAndReadIndicesConsidersOnlyPrimariesFromWriteIndex() {
         final TransportService mockTransportService = mock(TransportService.class);
         final ClusterService mockClusterService = mock(ClusterService.class);
+        final DiscoveryNode mockNode = mock(DiscoveryNode.class);
+        when(mockNode.getId()).thenReturn("mocknode");
+        when(mockClusterService.localNode()).thenReturn(mockNode);
         final ThreadPool mockThreadPool = mock(ThreadPool.class);
         final MetaDataCreateIndexService mockCreateIndexService = mock(MetaDataCreateIndexService.class);
         final IndexNameExpressionResolver mockIndexNameExpressionResolver = mock(IndexNameExpressionResolver.class);
@@ -377,31 +382,22 @@ public void testConditionEvaluationWhenAliasToWriteAndReadIndicesConsidersOnlyPr
         final MetaDataIndexAliasesService mdIndexAliasesService = mock(MetaDataIndexAliasesService.class);
 
         final Client mockClient = mock(Client.class);
-        final AdminClient mockAdminClient = mock(AdminClient.class);
-        final IndicesAdminClient mockIndicesAdminClient = mock(IndicesAdminClient.class);
-        when(mockClient.admin()).thenReturn(mockAdminClient);
-        when(mockAdminClient.indices()).thenReturn(mockIndicesAdminClient);
-        final IndicesStatsRequestBuilder mockIndicesStatsBuilder = mock(IndicesStatsRequestBuilder.class);
-        when(mockIndicesAdminClient.prepareStats(any())).thenReturn(mockIndicesStatsBuilder);
 
         final Map indexStats = new HashMap<>();
         int total = randomIntBetween(500, 1000);
         indexStats.put("logs-index-000001", createIndexStats(200L, total));
         indexStats.put("logs-index-000002", createIndexStats(300L, total));
         final IndicesStatsResponse statsResponse = createAliasToMultipleIndicesStatsResponse(indexStats);
-        when(mockIndicesStatsBuilder.clear()).thenReturn(mockIndicesStatsBuilder);
-        when(mockIndicesStatsBuilder.setDocs(true)).thenReturn(mockIndicesStatsBuilder);
-
-        assert statsResponse.getPrimaries().getDocs().getCount() == 500L;
-        assert statsResponse.getTotal().getDocs().getCount() == (total + total);
-
         doAnswer(invocation -> {
             Object[] args = invocation.getArguments();
-            assert args.length == 1;
-            ActionListener listener = (ActionListener) args[0];
+            assert args.length == 3;
+            ActionListener listener = (ActionListener) args[2];
             listener.onResponse(statsResponse);
             return null;
-        }).when(mockIndicesStatsBuilder).execute(any(ActionListener.class));
+        }).when(mockClient).execute(eq(IndicesStatsAction.INSTANCE), any(ActionRequest.class), any(ActionListener.class));
+
+        assert statsResponse.getPrimaries().getDocs().getCount() == 500L;
+        assert statsResponse.getTotal().getDocs().getCount() == (total + total);
 
         final IndexMetaData.Builder indexMetaData = IndexMetaData.builder("logs-index-000001")
             .putAlias(AliasMetaData.builder("logs-alias").writeIndex(false).build()).settings(settings(Version.CURRENT))
@@ -422,7 +418,7 @@ public void testConditionEvaluationWhenAliasToWriteAndReadIndicesConsidersOnlyPr
         RolloverRequest rolloverRequest = new RolloverRequest("logs-alias", "logs-index-000003");
         rolloverRequest.addMaxIndexDocsCondition(500L);
         rolloverRequest.dryRun(true);
-        transportRolloverAction.masterOperation(rolloverRequest, stateBefore, future);
+        transportRolloverAction.masterOperation(mock(Task.class), rolloverRequest, stateBefore, future);
 
         RolloverResponse response = future.actionGet();
         assertThat(response.getOldIndex(), equalTo("logs-index-000002"));
@@ -438,7 +434,7 @@ public void testConditionEvaluationWhenAliasToWriteAndReadIndicesConsidersOnlyPr
         rolloverRequest = new RolloverRequest("logs-alias", "logs-index-000003");
RolloverRequest("logs-alias", "logs-index-000003"); rolloverRequest.addMaxIndexDocsCondition(300L); rolloverRequest.dryRun(true); - transportRolloverAction.masterOperation(rolloverRequest, stateBefore, future); + transportRolloverAction.masterOperation(mock(Task.class), rolloverRequest, stateBefore, future); response = future.actionGet(); assertThat(response.getOldIndex(), equalTo("logs-index-000002")); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java index 9ec3daba19bc4..d119dcb1135c4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.Index; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -63,9 +64,10 @@ GetSettingsActionTests.this.threadPool, settingsFilter, new ActionFilters(Collec new Resolver(), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } @Override - protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, GetSettingsRequest request, ClusterState state, + ActionListener listener) { ClusterState stateWithIndex = ClusterStateCreationUtils.state(indexName, 1, 1); - super.masterOperation(request, stateWithIndex, listener); + super.masterOperation(task, request, stateWithIndex, listener); } } diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index a42791e69f658..fc33ee7686715 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -185,7 +185,7 @@ protected Response newResponse() { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { listener.onResponse(new Response()); // default implementation, overridden in specific tests } @@ -447,7 +447,8 @@ public void testMasterFailoverAfterStepDown() throws ExecutionException, Interru new Action( "internal:testAction", transportService, clusterService, threadPool) { @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, + ActionListener listener) throws Exception { // The other node has become master, simulate failures of this node while publishing cluster state through ZenDiscovery setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); Exception failure = randomBoolean() diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionUtils.java 
index be4a7b29703e1..fa5b5fa33bff0 100644
--- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionUtils.java
+++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionUtils.java
@@ -26,13 +26,15 @@ public class TransportMasterNodeActionUtils {
 
     /**
-     * Allows to directly call {@link TransportMasterNodeAction#masterOperation(MasterNodeRequest, ClusterState, ActionListener)} which is
-     * a protected method.
+     * Allows to directly call
+     * {@link TransportMasterNodeAction#masterOperation(org.elasticsearch.tasks.Task, MasterNodeRequest, ClusterState, ActionListener)}
+     * which is a protected method.
      */
     public static , Response extends ActionResponse> void runMasterOperation(
         TransportMasterNodeAction masterNodeAction, Request request, ClusterState clusterState,
         ActionListener actionListener) throws Exception {
         assert masterNodeAction.checkBlock(request, clusterState) == null;
-        masterNodeAction.masterOperation(request, clusterState, actionListener);
+        // TODO: pass through task here?
+        masterNodeAction.masterOperation(null, request, clusterState, actionListener);
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
index 46c5b74a94365..24e9751bc413f 100644
--- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
@@ -32,6 +32,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.transport.CapturingTransport;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -267,7 +268,7 @@ protected TestNodeResponse newNodeResponse() {
         }
 
         @Override
-        protected TestNodeResponse nodeOperation(TestNodeRequest request) {
+        protected TestNodeResponse nodeOperation(TestNodeRequest request, Task task) {
             return new TestNodeResponse();
         }
 
diff --git a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
index 54750933ecd90..b55253e208a91 100644
--- a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
+++ b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
@@ -41,6 +41,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -153,9 +154,9 @@ protected UpdateInternalOrPrivateAction.Response newResponse() {
         }
 
         @Override
         protected void masterOperation(
-                final UpdateInternalOrPrivateAction.Request request,
-                final ClusterState state,
-                final ActionListener listener) throws Exception {
+                Task task, final UpdateInternalOrPrivateAction.Request request,
+                final ClusterState state,
+                final ActionListener listener) throws Exception {
             clusterService.submitStateUpdateTask("update-index-internal-or-private", new ClusterStateUpdateTask() {
                 @Override
                 public ClusterState execute(final ClusterState currentState) throws Exception {
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java
index 13214b71165aa..8f14f367bc608 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java
@@ -88,15 +88,16 @@ protected void doExecute(Task task, CcrStatsAction.Request request, ActionListen
     @Override
     protected void masterOperation(
-            CcrStatsAction.Request request,
-            ClusterState state,
-            ActionListener listener
+            Task task, CcrStatsAction.Request request,
+            ClusterState state,
+            ActionListener listener
     ) throws Exception {
         CheckedConsumer handler = statsResponse -> {
             AutoFollowStats stats = autoFollowCoordinator.getStats();
             listener.onResponse(new CcrStatsAction.Response(stats, statsResponse));
         };
         FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest();
+        statsRequest.setParentTask(clusterService.localNode().getId(), task.getId());
         client.execute(FollowStatsAction.INSTANCE, statsRequest, ActionListener.wrap(handler, listener::onFailure));
     }
 
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java
index b77dad83f614e..3623017ac5d35 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
@@ -57,7 +58,7 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(DeleteAutoFollowPatternAction.Request request,
+    protected void masterOperation(Task task, DeleteAutoFollowPatternAction.Request request,
                                    ClusterState state,
                                    ActionListener listener) throws Exception {
         clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getName(),
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java
index cb96b8bb29851..64782cda63615 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ccr.Ccr;
@@ -58,7 +59,7 @@ protected FollowInfoAction.Response read(StreamInput in) throws IOException {
     }
 
     @Override
-    protected void masterOperation(FollowInfoAction.Request request,
+    protected void masterOperation(Task task, FollowInfoAction.Request request,
                                    ClusterState state,
                                    ActionListener listener) throws Exception {
 
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java
index f3fd4149ed3e9..6359815ced443 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
@@ -57,7 +58,7 @@ protected GetAutoFollowPatternAction.Response read(StreamInput in) throws IOExce
     }
 
     @Override
-    protected void masterOperation(GetAutoFollowPatternAction.Request request,
+    protected void masterOperation(Task task, GetAutoFollowPatternAction.Request request,
                                    ClusterState state,
                                    ActionListener listener) throws Exception {
         Map autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getName());
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java
index 3a3a3a82be784..12c0aec2bf3e7 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
 import org.elasticsearch.persistent.PersistentTasksService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
@@ -60,7 +61,7 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(PauseFollowAction.Request request,
+    protected void masterOperation(Task task, PauseFollowAction.Request request,
                                    ClusterState state,
                                    ActionListener listener) throws Exception {
         PersistentTasksCustomMetaData persistentTasksMetaData = state.metaData().custom(PersistentTasksCustomMetaData.TYPE);
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java
index 8809f65fc3991..ee616b423f23a 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.license.LicenseUtils;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ccr.CcrLicenseChecker;
@@ -76,7 +77,7 @@ protected AcknowledgedResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(PutAutoFollowPatternAction.Request request,
+    protected void masterOperation(Task task, PutAutoFollowPatternAction.Request request,
                                    ClusterState state,
                                    ActionListener listener) throws Exception {
         if (ccrLicenseChecker.isCcrAllowed() == false) {
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java
index 2a95c2a3c7aab..37d33b5187c43 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.license.LicenseUtils;
 import org.elasticsearch.snapshots.RestoreInfo;
 import org.elasticsearch.snapshots.RestoreService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ccr.CcrLicenseChecker;
@@ -94,9 +95,9 @@ protected PutFollowAction.Response read(StreamInput in) throws IOException {
 
     @Override
     protected void masterOperation(
-            final PutFollowAction.Request request,
-            final ClusterState state,
-            final ActionListener listener) {
+            Task task, final PutFollowAction.Request request,
+            final ClusterState state,
+            final ActionListener listener) {
         if (ccrLicenseChecker.isCcrAllowed() == false) {
             listener.onFailure(LicenseUtils.newComplianceException("ccr"));
             return;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
index 5ee7a445605a7..5d9aa0cc8076e 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
@@ -42,6 +42,7 @@
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.license.LicenseUtils;
 import org.elasticsearch.persistent.PersistentTasksService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ccr.Ccr;
@@ -117,7 +118,7 @@ protected ClusterBlockException checkBlock(ResumeFollowAction.Request request, C
     }
 
     @Override
-    protected void masterOperation(final ResumeFollowAction.Request request,
+    protected void masterOperation(Task task, final ResumeFollowAction.Request request,
                                    ClusterState state,
                                    final ActionListener listener) throws Exception {
         if (ccrLicenseChecker.isCcrAllowed() == false) {
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java
index e6521975b157e..4e1337d884319 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.index.seqno.RetentionLeaseNotFoundException;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ccr.Ccr;
@@ -86,9 +87,9 @@ protected AcknowledgedResponse newResponse() {
 
     @Override
     protected void masterOperation(
-            final UnfollowAction.Request request,
-            final ClusterState state,
-            final ActionListener listener) {
+            Task task, final UnfollowAction.Request request,
+            final ClusterState state,
+            final ActionListener listener) {
         clusterService.submitStateUpdateTask("unfollow_action", new ClusterStateUpdateTask() {
 
             @Override
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CCRInfoTransportActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CCRInfoTransportActionTests.java
index f1d2898c237da..8c7310a0a1bf3 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CCRInfoTransportActionTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CCRInfoTransportActionTests.java
@@ -108,7 +108,7 @@ public void testUsageStats() throws Exception {
         var usageAction = new CCRUsageTransportAction(mock(TransportService.class), null, null,
             mock(ActionFilters.class), null, Settings.EMPTY, licenseState);
         PlainActionFuture future = new PlainActionFuture<>();
-        usageAction.masterOperation(null, clusterState, future);
+        usageAction.masterOperation(null, null, clusterState, future);
         CCRInfoTransportAction.Usage ccrUsage = (CCRInfoTransportAction.Usage) future.get().getUsage();
         assertThat(ccrUsage.enabled(), equalTo(true));
         assertThat(ccrUsage.available(), equalTo(false));
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java
index 30f8f1e5298c5..4209a99d3330c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -58,8 +59,8 @@ protected ClusterBlockException checkBlock(DeleteLicenseRequest request, Cluster
     }
 
     @Override
-    protected void masterOperation(final DeleteLicenseRequest request, ClusterState state, final ActionListener
-        listener) throws ElasticsearchException {
+    protected void masterOperation(Task task, final DeleteLicenseRequest request, ClusterState state,
+                                   final ActionListener listener) throws ElasticsearchException {
         licenseService.removeLicense(request, new ActionListener() {
             @Override
             public void onResponse(ClusterStateUpdateResponse clusterStateUpdateResponse) {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetBasicStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetBasicStatusAction.java
index 7377daae40643..a22041c4a4125 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetBasicStatusAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetBasicStatusAction.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -38,7 +39,7 @@ protected GetBasicStatusResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(GetBasicStatusRequest request, ClusterState state,
+    protected void masterOperation(Task task, GetBasicStatusRequest request, ClusterState state,
                                    ActionListener listener) throws Exception {
         LicensesMetaData licensesMetaData = state.metaData().custom(LicensesMetaData.TYPE);
         if (licensesMetaData == null) {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java
index 676ae6543e765..cda8a57fdba2d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -48,7 +49,7 @@ protected ClusterBlockException checkBlock(GetLicenseRequest request, ClusterSta
     }
 
     @Override
-    protected void masterOperation(final GetLicenseRequest request, ClusterState state,
+    protected void masterOperation(Task task, final GetLicenseRequest request, ClusterState state,
                                    final ActionListener listener) throws ElasticsearchException {
         listener.onResponse(new GetLicenseResponse(licenseService.getLicense()));
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetTrialStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetTrialStatusAction.java
index 45e04548bcaa1..f31e6977663c4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetTrialStatusAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetTrialStatusAction.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -37,7 +38,7 @@ protected GetTrialStatusResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(GetTrialStatusRequest request, ClusterState state,
+    protected void masterOperation(Task task, GetTrialStatusRequest request, ClusterState state,
                                    ActionListener listener) throws Exception {
         LicensesMetaData licensesMetaData = state.metaData().custom(LicensesMetaData.TYPE);
         listener.onResponse(new GetTrialStatusResponse(licensesMetaData == null || licensesMetaData.isEligibleForTrial()));
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartBasicAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartBasicAction.java
index 32a82330f52af..5368936d2e9dc 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartBasicAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartBasicAction.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -49,7 +50,7 @@ protected PostStartBasicResponse read(StreamInput in) throws IOException {
     }
 
     @Override
-    protected void masterOperation(PostStartBasicRequest request, ClusterState state,
+    protected void masterOperation(Task task, PostStartBasicRequest request, ClusterState state,
                                    ActionListener listener) throws Exception {
         licenseService.startBasicLicense(request, listener);
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java
index 7e29f61ec5e2c..5a181eea4b620 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -41,7 +42,7 @@ protected PostStartTrialResponse newResponse() {
     }
 
     @Override
-    protected void masterOperation(PostStartTrialRequest request, ClusterState state,
+    protected void masterOperation(Task task, PostStartTrialRequest request, ClusterState state,
                                    ActionListener listener) throws Exception {
         licenseService.startTrialLicense(request, listener);
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java
index c8505c3d05ad2..a85a13427da93 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java
@@ -17,6 +17,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.protocol.xpack.license.PutLicenseResponse;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -56,8 +57,8 @@ protected ClusterBlockException checkBlock(PutLicenseRequest request, ClusterSta
     }
 
     @Override
-    protected void masterOperation(final PutLicenseRequest request, ClusterState state, final ActionListener
-        listener) throws ElasticsearchException {
+    protected void masterOperation(Task task, final PutLicenseRequest request, ClusterState state, final ActionListener
+        listener) throws ElasticsearchException {
         licenseService.registerLicense(request, listener);
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/ccr/CCRUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/ccr/CCRUsageTransportAction.java
index 6e69aca503fe2..e8adbaec187ef 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/ccr/CCRUsageTransportAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/ccr/CCRUsageTransportAction.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackSettings;
@@ -42,7 +43,8 @@ public CCRUsageTransportAction(TransportService transportService, ClusterService
     }
 
     @Override
-    protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) {
+    protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state,
+                                   ActionListener listener) {
         MetaData metaData = state.metaData();
 
         int numberOfFollowerIndices = 0;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
index d4ee0969a6366..45e78022379e7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
@@ -114,11 +114,6 @@ private Index[] resolveIndices(FreezeRequest request, ClusterState state) {
         return indices.toArray(Index.EMPTY_ARRAY);
     }
 
-    @Override
-    protected void masterOperation(FreezeRequest request, ClusterState state, ActionListener listener) {
-        throw new UnsupportedOperationException("The task parameter is required");
-    }
-
     @Override
     protected void masterOperation(Task task, TransportFreezeIndexAction.FreezeRequest request, ClusterState state,
                                    ActionListener listener) throws Exception {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java
index 42f14ee7cba2a..7379233df3c55 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackFeatureSet;
@@ -59,7 +60,7 @@ protected XPackUsageResponse newResponse() {
    }
 
     @Override
-    protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) {
+    protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, ActionListener listener) {
         final ActionListener> usageActionListener = new ActionListener<>() {
             @Override
             public void onResponse(List usages) {
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameUsageTransportAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameUsageTransportAction.java
index 36e1050b2ef03..01e729e71bfcf 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameUsageTransportAction.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameUsageTransportAction.java
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameUsageTransportAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -60,7 +61,8 @@ public DataFrameUsageTransportAction(TransportService transportService, ClusterS } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { boolean available = licenseState.isDataFrameAllowed(); if (enabled == false) { var usage = new DataFrameFeatureSetUsage(available, enabled, Collections.emptyMap(), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java index 122504859bd6a..3c6a9dda611c6 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction; @@ -59,7 +60,8 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, + ActionListener listener) throws Exception { PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { listener.onFailure(new ElasticsearchStatusException("Cannot delete data frame [" + request.getId() + diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java index ef8a4f28d798f..fa35719be8ae1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -101,7 +102,7 @@ protected AcknowledgedResponse 
newResponse() { } @Override - protected void masterOperation(Request request, ClusterState clusterState, ActionListener listener) + protected void masterOperation(Task task, Request request, ClusterState clusterState, ActionListener listener) throws Exception { if (!licenseState.isDataFrameAllowed()) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index 85dc812c5ea49..6193ee4a5b88d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -94,7 +95,7 @@ protected StartDataFrameTransformAction.Response read(StreamInput in) throws IOE } @Override - protected void masterOperation(StartDataFrameTransformAction.Request request, + protected void masterOperation(Task ignoredTask, StartDataFrameTransformAction.Request request, ClusterState state, ActionListener listener) throws Exception { if (!licenseState.isDataFrameAllowed()) { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameInfoTransportActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameInfoTransportActionTests.java index 856c608b5dea3..8695b774a35d8 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameInfoTransportActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameInfoTransportActionTests.java @@ -118,7 +118,7 @@ public void testUsageDisabled() throws IOException, InterruptedException, Execut var usageAction = new DataFrameUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, settings.build(), licenseState, mock(Client.class)); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, mock(ClusterState.class), future); + usageAction.masterOperation(null, null, mock(ClusterState.class), future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertFalse(usage.enabled()); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 6556a8ad0eff5..6b55006983d0c 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -82,7 +83,7 @@ protected ClusterBlockException checkBlock(DeprecationInfoAction.Request request } @Override - protected final void masterOperation(final DeprecationInfoAction.Request request, ClusterState state, + protected final void masterOperation(Task task, final DeprecationInfoAction.Request request, ClusterState state, final ActionListener listener) { if (licenseState.isDeprecationAllowed()) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java index a315559a2f9a7..e8f215a70d11d 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; @@ -62,7 +63,7 @@ protected NodesDeprecationCheckAction.NodeResponse newNodeResponse() { } @Override - protected NodesDeprecationCheckAction.NodeResponse nodeOperation(NodesDeprecationCheckAction.NodeRequest request) { + protected NodesDeprecationCheckAction.NodeResponse nodeOperation(NodesDeprecationCheckAction.NodeRequest request, Task task) { List issues = DeprecationInfoAction.filterChecks(DeprecationChecks.NODE_SETTINGS_CHECKS, (c) -> c.apply(settings, pluginsService.info())); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/GraphUsageTransportAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/GraphUsageTransportAction.java index f915d91524d46..15a0b44909288 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/GraphUsageTransportAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/GraphUsageTransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -38,7 +39,8 @@ public GraphUsageTransportAction(TransportService transportService, ClusterServi } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { GraphFeatureSetUsage usage = new GraphFeatureSetUsage(licenseState.isGraphAllowed(), XPackSettings.GRAPH_ENABLED.get(settings)); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/GraphInfoTransportActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/GraphInfoTransportActionTests.java index dcea451e92d36..9e99a9679333f 100644 --- 
a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/GraphInfoTransportActionTests.java +++ b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/GraphInfoTransportActionTests.java @@ -40,7 +40,7 @@ public void testAvailable() throws Exception { var usageAction = new GraphUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(available)); @@ -67,7 +67,7 @@ public void testEnabled() throws Exception { GraphUsageTransportAction usageAction = new GraphUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, settings.build(), licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.enabled(), is(enabled)); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleUsageTransportAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleUsageTransportAction.java index 0e8a9ae6f1d29..91fd744d9aabb 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleUsageTransportAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleUsageTransportAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -46,7 +47,8 @@ public IndexLifecycleUsageTransportAction(TransportService transportService, Clu } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { boolean available = licenseState.isIndexLifecycleAllowed(); MetaData metaData = state.metaData(); IndexLifecycleMetadata lifecycleMetadata = metaData.custom(IndexLifecycleMetadata.TYPE); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportDeleteLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportDeleteLifecycleAction.java index 360b14ec4b26b..c35e3495c05a5 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportDeleteLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportDeleteLifecycleAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; @@ -64,7 +65,7 @@ protected Response 
read(StreamInput in) throws IOException { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { clusterService.submitStateUpdateTask("delete-lifecycle-" + request.getPolicyName(), new AckedClusterStateUpdateTask(request, listener) { @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetLifecycleAction.java index 4c3657b76cba3..53fcba9cdd6d0 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetLifecycleAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; @@ -50,7 +51,7 @@ protected Response newResponse() { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { IndexLifecycleMetadata metadata = clusterService.state().metaData().custom(IndexLifecycleMetadata.TYPE); if (metadata == null) { if (request.getPolicyNames().length == 0) { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetStatusAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetStatusAction.java index 4a76120545087..d0a9cb0de42e3 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetStatusAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportGetStatusAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; @@ -43,7 +44,7 @@ protected Response newResponse() { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { IndexLifecycleMetadata metadata = state.metaData().custom(IndexLifecycleMetadata.TYPE); final Response response; if (metadata == null) { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportMoveToStepAction.java index 46f776d74faae..782baa2828b63 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportMoveToStepAction.java +++ 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportMoveToStepAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.action.MoveToStepAction; @@ -58,7 +59,7 @@ protected Response read(StreamInput in) throws IOException { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { IndexMetaData indexMetaData = state.metaData().index(request.getIndex()); if (indexMetaData == null) { listener.onFailure(new IllegalArgumentException("index [" + request.getIndex() + "] does not exist")); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportPutLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportPutLifecycleAction.java index 1b47ba2348cde..69c2aa0e827f8 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportPutLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportPutLifecycleAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -64,7 +65,7 @@ protected Response read(StreamInput in) throws IOException { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { // headers from the thread context stored by the AuthenticationService to be shared between the // REST layer and the Transport layer here must be accessed within this thread and not in the // cluster state thread in the ClusterStateUpdateTask below since that thread does not share the diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRemoveIndexLifecyclePolicyAction.java index 4e608b511d3e9..03a4a7e514009 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRemoveIndexLifecyclePolicyAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.Index; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.action.RemoveIndexLifecyclePolicyAction; @@ -53,7 +54,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) } @Override - protected void 
masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { final Index[] indices = indexNameExpressionResolver.concreteIndices(state, request.indicesOptions(), request.indices()); clusterService.submitStateUpdateTask("remove-lifecycle-for-index", new AckedClusterStateUpdateTask(request, listener) { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRetryAction.java index 181f2309062c0..5d90b388473fd 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportRetryAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleExecutionState; @@ -58,7 +59,7 @@ protected Response read(StreamInput in) throws IOException { } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) { clusterService.submitStateUpdateTask("ilm-re-run", new AckedClusterStateUpdateTask(request, listener) { @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStartILMAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStartILMAction.java index ec5cf4e64a40f..b4345566a572a 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStartILMAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStartILMAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.OperationMode; @@ -52,7 +53,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(StartILMRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, StartILMRequest request, ClusterState state, ActionListener listener) { clusterService.submitStateUpdateTask("ilm_operation_mode_update", new AckedClusterStateUpdateTask(request, listener) { @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStopILMAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStopILMAction.java index aa6820e1ed33e..2ffa519c9c183 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStopILMAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/action/TransportStopILMAction.java @@ 
-18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexlifecycle.OperationMode; @@ -52,7 +53,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(StopILMRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, StopILMRequest request, ClusterState state, ActionListener listener) { clusterService.submitStateUpdateTask("ilm_operation_mode_update", new AckedClusterStateUpdateTask(request, listener) { @Override diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInfoTransportActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInfoTransportActionTests.java index 914b902a85f6f..7265c5d6ff6be 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInfoTransportActionTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInfoTransportActionTests.java @@ -112,7 +112,7 @@ public void testUsageStats() throws Exception { var usageAction = new IndexLifecycleUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, clusterState, future); + usageAction.masterOperation(null, null, clusterState, future); IndexLifecycleFeatureSetUsage ilmUsage = (IndexLifecycleFeatureSetUsage) future.get().getUsage(); assertThat(ilmUsage.enabled(), equalTo(true)); assertThat(ilmUsage.available(), equalTo(false)); diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/LogstashUsageTransportAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/LogstashUsageTransportAction.java index 70284bcf2dc2d..55f096ec2e8e9 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/LogstashUsageTransportAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/LogstashUsageTransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -38,7 +39,8 @@ public LogstashUsageTransportAction(TransportService transportService, ClusterSe } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { boolean available = licenseState.isLogstashAllowed(); LogstashFeatureSetUsage usage = new LogstashFeatureSetUsage(available, XPackSettings.LOGSTASH_ENABLED.get(settings)); diff --git a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/LogstashInfoTransportActionTests.java b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/LogstashInfoTransportActionTests.java index 
ac6a4850e744b..42115fbf74c07 100644 --- a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/LogstashInfoTransportActionTests.java +++ b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/LogstashInfoTransportActionTests.java @@ -34,7 +34,7 @@ public void testEnabledSetting() throws Exception { LogstashUsageTransportAction usageAction = newUsageAction(settings, false); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); @@ -60,7 +60,7 @@ public void testAvailable() throws Exception { var usageAction = newUsageAction(Settings.EMPTY, available); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(available)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java index 6b18e89413762..be9c8d4c4fd6d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -64,7 +65,8 @@ public MachineLearningUsageTransportAction(TransportService transportService, Cl } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { if (enabled == false) { MachineLearningFeatureSetUsage usage = new MachineLearningFeatureSetUsage(licenseState.isMachineLearningAllowed(), enabled, Collections.emptyMap(), Collections.emptyMap(), 0); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java index 7b2ef2f8302ee..dcc72bc8c75aa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -75,7 +76,7 @@ protected AcknowledgedResponse read(StreamInput in) throws IOException { } @Override - protected void masterOperation(DeleteDataFrameAnalyticsAction.Request request, ClusterState state, + protected void masterOperation(Task 
task, DeleteDataFrameAnalyticsAction.Request request, ClusterState state, ActionListener listener) { String id = request.getId(); PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); @@ -92,6 +93,7 @@ protected void masterOperation(DeleteDataFrameAnalyticsAction.Request request, C DeleteRequest deleteRequest = new DeleteRequest(AnomalyDetectorsIndex.configIndexName()); deleteRequest.id(DataFrameAnalyticsConfig.documentId(id)); deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + deleteRequest.setParentTask(clusterService.localNode().getId(), task.getId()); executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest, ActionListener.wrap( deleteResponse -> { if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java index 1a6bbfb8f2a3b..d731f1b3b08a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -77,7 +78,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(DeleteDatafeedAction.Request request, ClusterState state, + protected void masterOperation(Task task, DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) { if (migrationEligibilityCheck.datafeedIsEligibleForMigration(request.getDatafeedId(), state)) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index c58d9cc16fab7..b40f49523c751 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -149,11 +149,6 @@ protected ClusterBlockException checkBlock(DeleteJobAction.Request request, Clus return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } - @Override - protected void masterOperation(DeleteJobAction.Request request, ClusterState state, ActionListener listener) { - throw new UnsupportedOperationException("the Task parameter is required"); - } - @Override protected void masterOperation(Task task, DeleteJobAction.Request request, ClusterState state, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java index dd1df011758b3..24b8998763ec8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java @@ 
-20,6 +20,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; @@ -66,7 +67,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(FinalizeJobExecutionAction.Request request, ClusterState state, + protected void masterOperation(Task task, FinalizeJobExecutionAction.Request request, ClusterState state, ActionListener listener) { String jobIdString = String.join(",", request.getJobIds()); logger.debug("finalizing jobs [{}]", jobIdString); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java index 72ebc044364fe..89f14f717064e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -59,7 +60,7 @@ protected GetDatafeedsAction.Response newResponse() { } @Override - protected void masterOperation(GetDatafeedsAction.Request request, ClusterState state, + protected void masterOperation(Task task, GetDatafeedsAction.Request request, ClusterState state, ActionListener listener) { logger.debug("Get datafeed '{}'", request.getDatafeedId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java index 6b32d414516a0..6c90b15dab13b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -54,7 +55,7 @@ protected GetDatafeedsStatsAction.Response newResponse() { } @Override - protected void masterOperation(GetDatafeedsStatsAction.Request request, ClusterState state, + protected void masterOperation(Task task, GetDatafeedsStatsAction.Request request, ClusterState state, ActionListener listener) throws Exception { logger.debug("Get stats for datafeed '{}'", request.getDatafeedId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java index 5687198f6ec69..ba81bfac88277 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetJobsAction; @@ -44,7 +45,7 @@ protected GetJobsAction.Response newResponse() { } @Override - protected void masterOperation(GetJobsAction.Request request, ClusterState state, + protected void masterOperation(Task task, GetJobsAction.Request request, ClusterState state, ActionListener listener) { logger.debug("Get job '{}'", request.getJobId()); jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), ActionListener.wrap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index afe27ee7bbd0f..1be58fa6998ff 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -208,7 +209,8 @@ protected ClusterBlockException checkBlock(OpenJobAction.Request request, Cluste } @Override - protected void masterOperation(OpenJobAction.Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, OpenJobAction.Request request, ClusterState state, + ActionListener listener) { if (migrationEligibilityCheck.jobIsEligibleForMigration(request.getJobParams().getJobId(), state)) { listener.onFailure(ExceptionsHelper.configHasNotBeenMigrated("open job", request.getJobParams().getJobId())); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index 993fe548cc952..906ecda3ccaea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -92,7 +92,7 @@ protected PutDatafeedAction.Response newResponse() { } @Override - protected void masterOperation(PutDatafeedAction.Request request, ClusterState state, + protected void masterOperation(Task task, PutDatafeedAction.Request request, ClusterState state, ActionListener listener) { // If security is enabled only create the datafeed if the user requesting creation has // permission to read the indices the datafeed is going to read from diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java index c1244649bffca..5e17a818b98f6 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java @@ -53,7 +53,7 @@ protected PutJobAction.Response newResponse() { } @Override - protected void masterOperation(PutJobAction.Request request, ClusterState state, + protected void masterOperation(Task task, PutJobAction.Request request, ClusterState state, ActionListener listener) throws Exception { jobManager.putJob(request, analysisRegistry, state, listener); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index ab2fb1368345a..5a02cb165d9a0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -70,7 +71,7 @@ protected RevertModelSnapshotAction.Response newResponse() { } @Override - protected void masterOperation(RevertModelSnapshotAction.Request request, ClusterState state, + protected void masterOperation(Task task, RevertModelSnapshotAction.Request request, ClusterState state, ActionListener listener) { if (migrationEligibilityCheck.jobIsEligibleForMigration(request.getJobId(), state)) { listener.onFailure(ExceptionsHelper.configHasNotBeenMigrated("revert model snapshot", request.getJobId())); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java index 743ed883524e0..5d5bf5efb8d02 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -90,8 +91,8 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(SetUpgradeModeAction.Request request, ClusterState state, ActionListener listener) - throws Exception { + protected void masterOperation(Task task, SetUpgradeModeAction.Request request, ClusterState state, + ActionListener listener) throws Exception { // Don't want folks spamming this endpoint while it is in progress, only allow one request to be handled at a time if (isRunning.compareAndSet(false, true) == false) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java 
index 9011b65f8009e..d3e643b27e220 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -122,7 +123,7 @@ protected ClusterBlockException checkBlock(StartDataFrameAnalyticsAction.Request } @Override - protected void masterOperation(StartDataFrameAnalyticsAction.Request request, ClusterState state, + protected void masterOperation(Task task, StartDataFrameAnalyticsAction.Request request, ClusterState state, ActionListener listener) { if (licenseState.isMachineLearningAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java index 2da7b3291c7e1..bbb7eb3c9a969 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -146,7 +147,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(StartDatafeedAction.Request request, ClusterState state, + protected void masterOperation(Task task, StartDatafeedAction.Request request, ClusterState state, ActionListener listener) { StartDatafeedAction.DatafeedParams params = request.getParams(); if (licenseState.isMachineLearningAllowed() == false) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index b7029eff79cd8..164c297251bbc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -64,7 +65,7 @@ protected PutDatafeedAction.Response newResponse() { } @Override - protected void masterOperation(UpdateDatafeedAction.Request request, ClusterState state, + protected void masterOperation(Task task, UpdateDatafeedAction.Request request, ClusterState state, ActionListener listener) throws Exception { if 
(migrationEligibilityCheck.datafeedIsEligibleForMigration(request.getUpdate().getId(), state)) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java index 3e9abf3ae4179..5505669d9ea4e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutJobAction; @@ -44,7 +45,8 @@ protected PutJobAction.Response newResponse() { } @Override - protected void masterOperation(UpdateJobAction.Request request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, UpdateJobAction.Request request, ClusterState state, + ActionListener listener) { jobManager.updateJob(request, listener); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java index 58487d4877a60..29f348c3857b9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java @@ -111,7 +111,7 @@ public void testAvailable() throws Exception { assertThat(featureSet.available(), is(available)); var usageAction = newUsageAction(commonSettings); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, ClusterState.EMPTY_STATE, future); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(available)); @@ -135,7 +135,7 @@ public void testEnabled() throws Exception { assertThat(featureSet.enabled(), is(expected)); var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, ClusterState.EMPTY_STATE, future); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.enabled(), is(expected)); @@ -168,7 +168,7 @@ public void testUsage() throws Exception { var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, ClusterState.EMPTY_STATE, future); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage mlUsage = future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); @@ -248,7 +248,7 @@ public void testUsageDisabledML() throws Exception { var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, ClusterState.EMPTY_STATE, future); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage mlUsage = future.get().getUsage(); BytesStreamOutput out = new 
BytesStreamOutput(); mlUsage.writeTo(out); @@ -270,7 +270,7 @@ public void testNodeCount() throws Exception { var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, clusterState, future); + usageAction.masterOperation(null, null, clusterState, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(true)); @@ -296,7 +296,7 @@ public void testUsageGivenMlMetadataNotInstalled() throws Exception { var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, ClusterState.EMPTY_STATE, future); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(true)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionActionTests.java index fc44c520ebfc7..11fdd9a2c09a4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionActionTests.java @@ -71,7 +71,7 @@ public void testOperation() { FinalizeJobExecutionAction.Request request = new FinalizeJobExecutionAction.Request(new String[]{"job1", "job2"}); AtomicReference ack = new AtomicReference<>(); - action.masterOperation(request, clusterState, ActionListener.wrap( + action.masterOperation(null, request, clusterState, ActionListener.wrap( ack::set, e -> assertNull(e.getMessage()) )); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java index 27fe91a08bbd3..b7e3bf98b1c06 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringUsageTransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -46,7 +47,8 @@ public MonitoringUsageTransportAction(TransportService transportService, Cluster } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { final boolean collectionEnabled = monitoringService != null && monitoringService.isMonitoringActive(); var usage = new MonitoringFeatureSetUsage(licenseState.isMonitoringAllowed(), enabled, collectionEnabled, exportersUsage(exporters)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java index bc7e9ffca5cc9..920b201bfc97b 100644 --- 
a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringInfoTransportActionTests.java @@ -106,7 +106,7 @@ public void testUsage() throws Exception { mock(ActionFilters.class), null, Settings.EMPTY,licenseState, new MonitoringUsageServices(monitoring, exporters)); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); MonitoringFeatureSetUsage monitoringUsage = (MonitoringFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); out.setVersion(serializedVersion); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java index 74982b9bf2c3f..5253c05517e84 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -38,7 +39,8 @@ public RollupUsageTransportAction(TransportService transportService, ClusterServ } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { boolean available = licenseState.isRollupAllowed(); // TODO expose the currently running rollup tasks on this node? 
Unclear the best way to do that RollupFeatureSetUsage usage = diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index 6a0eae1685691..571d37e9652ba 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; @@ -93,7 +94,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(PutRollupJobAction.Request request, ClusterState clusterState, + protected void masterOperation(Task task, PutRollupJobAction.Request request, ClusterState clusterState, ActionListener listener) { if (!licenseState.isRollupAllowed()) { @@ -107,6 +108,7 @@ protected void masterOperation(PutRollupJobAction.Request request, ClusterState FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest() .indices(request.getConfig().getIndexPattern()) .fields(request.getConfig().getAllFields().toArray(new String[0])); + fieldCapsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.fieldCaps(fieldCapsRequest, new ActionListener() { @Override diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java index a47e582adc1c9..135d67b7eaef3 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java @@ -59,7 +59,7 @@ public void testUsage() throws ExecutionException, InterruptedException, IOExcep var usageAction = new RollupUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage rollupUsage = future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); rollupUsage.writeTo(out); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java index e9d23567e062a..2b2cbe5364b4c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityUsageTransportAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -68,7 +69,8 @@ public SecurityUsageTransportAction(TransportService transportService, ClusterSe } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { Map sslUsage = sslUsage(settings); Map tokenServiceUsage = tokenServiceUsage(settings); Map apiKeyServiceUsage = apiKeyServiceUsage(settings); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java index b4ee8b677c13b..886f506a20765 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; @@ -56,7 +57,7 @@ protected ClearRealmCacheResponse.Node newNodeResponse() { } @Override - protected ClearRealmCacheResponse.Node nodeOperation(ClearRealmCacheRequest.Node nodeRequest) throws ElasticsearchException { + protected ClearRealmCacheResponse.Node nodeOperation(ClearRealmCacheRequest.Node nodeRequest, Task task) throws ElasticsearchException { if (nodeRequest.getRealms() == null || nodeRequest.getRealms().length == 0) { for (Realm realm : realms) { clearCache(realm, nodeRequest.getUsernames()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportClearRolesCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportClearRolesCacheAction.java index 3c19e3deaf6a9..191d592eba4b9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportClearRolesCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportClearRolesCacheAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; @@ -49,7 +50,7 @@ protected ClearRolesCacheResponse.Node newNodeResponse() { } @Override - protected ClearRolesCacheResponse.Node nodeOperation(ClearRolesCacheRequest.Node request) { + protected ClearRolesCacheResponse.Node nodeOperation(ClearRolesCacheRequest.Node request, Task task) { if (request.getNames() == null || request.getNames().length == 0) { rolesStore.invalidateAll(); } else { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java index fe89488de86a9..27b3c1ef7cc3f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityInfoTransportActionTests.java @@ -154,7 +154,7 @@ public void testUsage() throws Exception { var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); SecurityFeatureSetUsage securityUsage = (SecurityFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); securityUsage.writeTo(out); @@ -254,7 +254,7 @@ public void testUsageOnTrialLicenseWithSecurityDisabledByDefault() throws Except var usageAction = newUsageAction(settings.build()); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); SecurityFeatureSetUsage securityUsage = (SecurityFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); securityUsage.writeTo(out); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java index 561d1ab2cdb8b..3114f263eee29 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -49,11 +50,13 @@ public SqlUsageTransportAction(TransportService transportService, ClusterService } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { boolean available = licenseState.isSqlAllowed(); if (enabled) { SqlStatsRequest sqlRequest = new SqlStatsRequest(); sqlRequest.includeStats(true); + sqlRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.execute(SqlStatsAction.INSTANCE, sqlRequest, ActionListener.wrap(r -> { List countersPerNode = r.getNodes() .stream() diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlStatsAction.java index 815e45175e95e..21c20a1ffce3f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlStatsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; @@ -51,7 +52,7 @@ protected SqlStatsResponse.NodeStatsResponse newNodeResponse() { } @Override - protected SqlStatsResponse.NodeStatsResponse nodeOperation(SqlStatsRequest.NodeStatsRequest request) { + protected SqlStatsResponse.NodeStatsResponse nodeOperation(SqlStatsRequest.NodeStatsRequest request, Task task) { SqlStatsResponse.NodeStatsResponse statsResponse = new SqlStatsResponse.NodeStatsResponse(clusterService.localNode()); statsResponse.setStats(planExecutor.metrics().stats()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java index 916ffcf6e3969..fb8f63a35aaca 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java @@ -12,10 +12,12 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -102,11 +104,15 @@ public void testUsageStats() throws Exception { listener.onResponse(new SqlStatsResponse(new ClusterName("whatever"), nodes, Collections.emptyList())); return null; }).when(client).execute(eq(SqlStatsAction.INSTANCE), any(), any()); + ClusterService clusterService = mock(ClusterService.class); + final DiscoveryNode mockNode = mock(DiscoveryNode.class); + when(mockNode.getId()).thenReturn("mocknode"); + when(clusterService.localNode()).thenReturn(mockNode); - var usageAction = new SqlUsageTransportAction(mock(TransportService.class), null, null, + var usageAction = new SqlUsageTransportAction(mock(TransportService.class), clusterService, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState, client); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(mock(Task.class), null, null, future); SqlFeatureSetUsage sqlUsage = (SqlFeatureSetUsage) future.get().getUsage(); long fooBarBaz = ObjectPath.eval("foo.bar.baz", sqlUsage.stats()); diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/VectorsUsageTransportAction.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/VectorsUsageTransportAction.java index 973dfa29d247e..8c16d651f47ee 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/VectorsUsageTransportAction.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/VectorsUsageTransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.XPackSettings; @@ -38,7 +39,8 @@ public VectorsUsageTransportAction(TransportService transportService, ClusterSer } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { VectorsFeatureSetUsage usage = new VectorsFeatureSetUsage(licenseState.isVectorsAllowed(), XPackSettings.VECTORS_ENABLED.get(settings)); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/VectorsInfoTransportActionTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/VectorsInfoTransportActionTests.java index fd4b3c253cd1e..2ffab717faf03 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/VectorsInfoTransportActionTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/VectorsInfoTransportActionTests.java @@ -40,7 +40,7 @@ public void testAvailable() throws Exception { var usageAction = new VectorsUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.available(), is(available)); @@ -67,7 +67,7 @@ public void testEnabled() throws Exception { VectorsUsageTransportAction usageAction = new VectorsUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null, settings.build(), licenseState); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage usage = future.get().getUsage(); assertThat(usage.enabled(), is(enabled)); diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeFeatureSet.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeFeatureSet.java index 1d84099ff7afa..a412df53e6205 100644 --- a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeFeatureSet.java +++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeFeatureSet.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; @@ -63,7 +64,8 @@ public UsageTransportAction(TransportService transportService, ClusterService cl } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { final boolean available = licenseState.isVotingOnlyAllowed(); final VotingOnlyNodeFeatureSetUsage usage = new VotingOnlyNodeFeatureSetUsage(available); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java index 667692bd98c7d..231d6a7f811a2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; @@ -52,12 +53,14 @@ public WatcherUsageTransportAction(TransportService transportService, ClusterSer } @Override - protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener listener) { + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { if (enabled) { try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(WATCHER_ORIGIN)) { WatcherStatsRequest statsRequest = new WatcherStatsRequest(); statsRequest.includeStats(true); + statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.execute(WatcherStatsAction.INSTANCE, statsRequest, ActionListener.wrap(r -> { List countersPerNode = r.getNodes() .stream() diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/TransportWatcherServiceAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/TransportWatcherServiceAction.java index 9e71f958675be..a992a9a3675c3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/TransportWatcherServiceAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/TransportWatcherServiceAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; @@ -69,7 +70,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(WatcherServiceRequest request, ClusterState state, + protected void masterOperation(Task task, WatcherServiceRequest request, ClusterState state, ActionListener listener) { switch (request.getCommand()) { case STOP: diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java index 51251e58003e8..c24d4dab87538 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.WatcherMetaData; @@ -62,7 +63,7 @@ protected WatcherStatsResponse.Node newNodeResponse() { } @Override - protected WatcherStatsResponse.Node nodeOperation(WatcherStatsRequest.Node request) { + protected WatcherStatsResponse.Node nodeOperation(WatcherStatsRequest.Node request, Task task) { WatcherStatsResponse.Node statsResponse = new WatcherStatsResponse.Node(clusterService.localNode()); statsResponse.setWatcherState(lifeCycleService.getState()); statsResponse.setThreadPoolQueueSize(executionService.executionThreadPoolQueueSize()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java index a64d9d60a8920..e33ee16b63dff 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -19,6 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -111,11 +113,15 @@ public void testUsageStats() throws Exception { nodes, Collections.emptyList())); return null; }).when(client).execute(eq(WatcherStatsAction.INSTANCE), any(), any()); + ClusterService clusterService = mock(ClusterService.class); + final DiscoveryNode mockNode = mock(DiscoveryNode.class); + when(mockNode.getId()).thenReturn("mocknode"); + when(clusterService.localNode()).thenReturn(mockNode); - var usageAction = new WatcherUsageTransportAction(mock(TransportService.class), null, null, + var usageAction = new WatcherUsageTransportAction(mock(TransportService.class), clusterService, null, mock(ActionFilters.class), null, Settings.EMPTY, licenseState, client); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, future); + usageAction.masterOperation(mock(Task.class), null, null, future); WatcherFeatureSetUsage watcherUsage = (WatcherFeatureSetUsage) future.get().getUsage(); assertThat(watcherUsage.stats().keySet(), containsInAnyOrder("foo", "spam")); long fooBarBaz = ObjectPath.eval("foo.bar.baz", watcherUsage.stats()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsActionTests.java index 9a7c9f58f3d43..d15e11c54c902 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsActionTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsActionTests.java @@ -85,8 +85,8 @@ public void setupTransportAction() { public void testWatcherStats() throws Exception { WatcherStatsRequest request = new WatcherStatsRequest(); request.includeStats(true); - WatcherStatsResponse.Node nodeResponse1 = action.nodeOperation(new WatcherStatsRequest.Node(request, "nodeId")); - WatcherStatsResponse.Node nodeResponse2 = action.nodeOperation(new WatcherStatsRequest.Node(request, "nodeId2")); + WatcherStatsResponse.Node nodeResponse1 = action.nodeOperation(new WatcherStatsRequest.Node(request, "nodeId"), null); + WatcherStatsResponse.Node nodeResponse2 = action.nodeOperation(new WatcherStatsRequest.Node(request, "nodeId2"), null); WatcherStatsResponse response = action.newResponse(request, Arrays.asList(nodeResponse1, nodeResponse2), Collections.emptyList()); From 7deb952068ac348068c94a8d7f2f973917a62b43 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 26 Jun 2019 12:39:29 -0500 Subject: [PATCH 024/140] [ML][Data Frame] adds tests for continuous DF (#43601) --- .../integration/DataFrameIntegTestCase.java | 28 +++-- .../integration/DataFrameTransformIT.java | 88 +++++++++++++ .../integration/DataFrameAuditorIT.java | 2 +- .../DataFrameGetAndGetStatsIT.java | 32 +++-- .../integration/DataFramePivotRestIT.java | 117 ++++++++++++++++++ .../integration/DataFrameRestTestCase.java | 101 +++++++++++---- .../integration/DataFrameUsageIT.java | 27 +++- .../transforms/DataFrameTransformTask.java | 5 +- .../test/data_frame/transforms_crud.yml | 34 +++++ .../test/data_frame/transforms_start_stop.yml | 63 ++++++++++ 10 files changed, 446 insertions(+), 51 deletions(-) diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java index fe9bc0491afa2..e79dce592eb7c 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java @@ -204,19 +204,33 @@ protected DataFrameTransformConfig createTransformConfig(String id, return createTransformConfig(id, groups, aggregations, destinationIndex, QueryBuilders.matchAllQuery(), sourceIndices); } + protected DataFrameTransformConfig.Builder createTransformConfigBuilder(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + QueryBuilder queryBuilder, + String... sourceIndices) throws Exception { + return DataFrameTransformConfig.builder() + .setId(id) + .setSource(SourceConfig.builder().setIndex(sourceIndices).setQueryConfig(createQueryConfig(queryBuilder)).build()) + .setDest(DestConfig.builder().setIndex(destinationIndex).build()) + .setPivotConfig(createPivotConfig(groups, aggregations)) + .setDescription("Test data frame transform config id: " + id); + } + protected DataFrameTransformConfig createTransformConfig(String id, Map groups, AggregatorFactories.Builder aggregations, String destinationIndex, QueryBuilder queryBuilder, String... 
sourceIndices) throws Exception { - return DataFrameTransformConfig.builder() - .setId(id) - .setSource(SourceConfig.builder().setIndex(sourceIndices).setQueryConfig(createQueryConfig(queryBuilder)).build()) - .setDest(DestConfig.builder().setIndex(destinationIndex).build()) - .setPivotConfig(createPivotConfig(groups, aggregations)) - .setDescription("Test data frame transform config id: " + id) - .build(); + return createTransformConfigBuilder(id, groups, aggregations, destinationIndex, queryBuilder, sourceIndices).build(); + } + + protected void bulkIndexDocs(BulkRequest request) throws Exception { + RestHighLevelClient restClient = new TestRestHighLevelClient(); + BulkResponse response = restClient.bulk(request, RequestOptions.DEFAULT); + assertThat(response.buildFailureMessage(), response.hasFailures(), is(false)); } protected void createReviewsIndex(String indexName, int numDocs) throws Exception { diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java index 174a956eb3c45..ef6a65c86787c 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java @@ -7,11 +7,19 @@ package org.elasticsearch.xpack.dataframe.integration; import org.elasticsearch.Version; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.core.IndexerState; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.dataframe.transforms.DataFrameTransformTaskState; +import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig; import org.elasticsearch.client.dataframe.transforms.pivot.SingleGroupSource; import org.elasticsearch.client.dataframe.transforms.pivot.TermsGroupSource; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -23,6 +31,7 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; public class DataFrameTransformIT extends DataFrameIntegTestCase { @@ -68,4 +77,83 @@ public void testDataFrameTransformCrud() throws Exception { deleteDataFrameTransform(config.getId()); } + public void testContinuousDataFrameTransformCrud() throws Exception { + String indexName = "continuous-crud-reviews"; + createReviewsIndex(indexName, 100); + + Map groups = new HashMap<>(); + groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null, null)); + groups.put("by-user", TermsGroupSource.builder().setField("user_id").build()); + groups.put("by-business", TermsGroupSource.builder().setField("business_id").build()); + + AggregatorFactories.Builder aggs = AggregatorFactories.builder() + 
.addAggregator(AggregationBuilders.avg("review_score").field("stars")) + .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); + + DataFrameTransformConfig config = createTransformConfigBuilder("data-frame-transform-crud", + groups, + aggs, + "reviews-by-user-business-day", + QueryBuilders.matchAllQuery(), + indexName) + .setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1))) + .build(); + + assertTrue(putDataFrameTransform(config, RequestOptions.DEFAULT).isAcknowledged()); + assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged()); + + waitUntilCheckpoint(config.getId(), 1L); + assertThat(getDataFrameTransformStats(config.getId()).getTransformsStateAndStats().get(0).getTransformState().getTaskState(), + equalTo(DataFrameTransformTaskState.STARTED)); + + long docsIndexed = getDataFrameTransformStats(config.getId()) + .getTransformsStateAndStats() + .get(0) + .getTransformStats() + .getNumDocuments(); + + DataFrameTransformConfig storedConfig = getDataFrameTransform(config.getId()).getTransformConfigurations().get(0); + assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); + Instant now = Instant.now(); + assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); + + // index some more docs + long timeStamp = Instant.now().toEpochMilli() - 1_000; + long user = 42; + BulkRequest bulk = new BulkRequest(indexName); + for (int i = 0; i < 25; i++) { + int stars = (i + 20) % 5; + long business = (i + 100) % 50; + + StringBuilder sourceBuilder = new StringBuilder(); + sourceBuilder.append("{\"user_id\":\"") + .append("user_") + .append(user) + .append("\",\"count\":") + .append(i) + .append(",\"business_id\":\"") + .append("business_") + .append(business) + .append("\",\"stars\":") + .append(stars) + .append(",\"timestamp\":") + .append(timeStamp) + .append("}"); + bulk.add(new IndexRequest().source(sourceBuilder.toString(), XContentType.JSON)); + } + bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + bulkIndexDocs(bulk); + + waitUntilCheckpoint(config.getId(), 2L); + + // Assert that we wrote the new docs + assertThat(getDataFrameTransformStats(config.getId()) + .getTransformsStateAndStats() + .get(0) + .getTransformStats() + .getNumDocuments(), greaterThan(docsIndexed)); + + stopDataFrameTransform(config.getId()); + deleteDataFrameTransform(config.getId()); + } } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java index 24c59e0c26ece..0877fe22c78f7 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java @@ -58,7 +58,7 @@ public void testAuditorWritesAudits() throws Exception { setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, dataFrameIndex); String query = "\"match\": {\"user_id\": \"user_26\"}"; - createPivotReviewsTransform(transformId, dataFrameIndex, query, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + createPivotReviewsTransform(transformId, dataFrameIndex, query, null, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); startAndWaitForTransform(transformId, dataFrameIndex, 
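For reference, a hedged sketch using only the high-level-REST-client builders exercised above (the id, index names, and `pivotConfig` variable are placeholders): the `sync` block is the only thing that separates a continuous transform config from a batch one.

    // pivotConfig is assumed to be built elsewhere via createPivotConfig(...).
    DataFrameTransformConfig continuousConfig = DataFrameTransformConfig.builder()
        .setId("example-continuous-transform")
        .setSource(SourceConfig.builder().setIndex("reviews").build())
        .setDest(DestConfig.builder().setIndex("reviews-by-user").build())
        .setPivotConfig(pivotConfig)
        // The sync config makes the transform continuous: it re-queries the source
        // by the given time field, lagging behind real time by the configured delay.
        .setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(60)))
        .build();
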
BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java index e209ed72c35a4..9c425646021aa 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java @@ -61,10 +61,11 @@ public void clearOutTransforms() throws Exception { public void testGetAndGetStats() throws Exception { createPivotReviewsTransform("pivot_1", "pivot_reviews_1", null); createPivotReviewsTransform("pivot_2", "pivot_reviews_2", null); + createContinuousPivotReviewsTransform("pivot_continuous", "pivot_reviews_continuous", null); - // TODO: adjust when we support continuous startAndWaitForTransform("pivot_1", "pivot_reviews_1"); startAndWaitForTransform("pivot_2", "pivot_reviews_2"); + startAndWaitForContinuousTransform("pivot_continuous", "pivot_reviews_continuous", null); stopDataFrameTransform("pivot_1", false); stopDataFrameTransform("pivot_2", false); @@ -74,19 +75,19 @@ public void testGetAndGetStats() throws Exception { // check all the different ways to retrieve all stats Request getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "_stats", authHeader); Map stats = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", stats)); + assertEquals(3, XContentMapValues.extractValue("count", stats)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "_all/_stats", authHeader); stats = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", stats)); + assertEquals(3, XContentMapValues.extractValue("count", stats)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "*/_stats", authHeader); stats = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", stats)); + assertEquals(3, XContentMapValues.extractValue("count", stats)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "pivot_1,pivot_2/_stats", authHeader); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(2, XContentMapValues.extractValue("count", stats)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "pivot_*/_stats", authHeader); stats = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", stats)); + assertEquals(3, XContentMapValues.extractValue("count", stats)); List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); // Verify that both transforms have valid stats @@ -109,26 +110,39 @@ public void testGetAndGetStats() throws Exception { transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); Map state = (Map) XContentMapValues.extractValue("state", transformsStats.get(0)); - assertEquals(1, transformsStats.size()); assertEquals("stopped", XContentMapValues.extractValue("task_state", state)); assertEquals(null, XContentMapValues.extractValue("current_position", state)); assertEquals(1, XContentMapValues.extractValue("checkpoint", state)); + // only continuous + 
getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "pivot_continuous/_stats", authHeader); + stats = entityAsMap(client().performRequest(getRequest)); + assertEquals(1, XContentMapValues.extractValue("count", stats)); + + transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + assertEquals(1, transformsStats.size()); + state = (Map) XContentMapValues.extractValue("state", transformsStats.get(0)); + assertEquals("started", XContentMapValues.extractValue("task_state", state)); + assertEquals(1, XContentMapValues.extractValue("checkpoint", state)); + + // check all the different ways to retrieve all transforms getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT, authHeader); Map transforms = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", transforms)); + assertEquals(3, XContentMapValues.extractValue("count", transforms)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "_all", authHeader); transforms = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", transforms)); + assertEquals(3, XContentMapValues.extractValue("count", transforms)); getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "*", authHeader); transforms = entityAsMap(client().performRequest(getRequest)); - assertEquals(2, XContentMapValues.extractValue("count", transforms)); + assertEquals(3, XContentMapValues.extractValue("count", transforms)); // only pivot_1 getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "pivot_1", authHeader); transforms = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", transforms)); + + stopDataFrameTransform("pivot_continuous", false); } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index ff384b7c0fb7c..85457307fc84f 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -11,6 +11,7 @@ import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -20,6 +21,8 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; public class DataFramePivotRestIT extends DataFrameRestTestCase { @@ -127,6 +130,120 @@ public void testPivotWithPipeline() throws Exception { assertThat(actual, equalTo(pipelineValue)); } + public void testContinuousPivot() throws Exception { + String indexName = "continuous_reviews"; + createReviewsIndex(indexName); + String transformId = "simple_continuous_pivot"; + String dataFrameIndex = "pivot_reviews_continuous"; + setupDataAccessRole(DATA_ACCESS_ROLE, indexName, dataFrameIndex); + final Request createDataframeTransformRequest = createRequestWithAuth("PUT", DATAFRAME_ENDPOINT + transformId, + 
BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + String config = "{" + + " \"source\": {\"index\":\"" + indexName + "\"}," + + " \"dest\": {\"index\":\"" + dataFrameIndex + "\"}," + + " \"sync\": {\"time\": {\"field\": \"timestamp\", \"delay\": \"1s\"}}," + + " \"pivot\": {" + + " \"group_by\": {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForContinuousTransform(transformId, dataFrameIndex, null); + assertTrue(indexExists(dataFrameIndex)); + // get and check some users + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_0", 3.776978417); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_5", 3.72); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_11", 3.846153846); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_20", 3.769230769); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); + + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(27, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + final StringBuilder bulk = new StringBuilder(); + long user = 42; + long user26 = 26; + + long dateStamp = Instant.now().toEpochMilli() - 1_000; + for (int i = 0; i < 25; i++) { + bulk.append("{\"index\":{\"_index\":\"" + indexName + "\"}}\n"); + int stars = (i * 32) % 5; + long business = (stars * user) % 13; + String location = (user + 10) + "," + (user + 15); + + bulk.append("{\"user_id\":\"") + .append("user_") + .append(user) + .append("\",\"business_id\":\"") + .append("business_") + .append(business) + .append("\",\"stars\":") + .append(stars) + .append(",\"location\":\"") + .append(location) + .append("\",\"timestamp\":") + .append(dateStamp) + .append("}\n"); + + stars = 5; + business = 11; + bulk.append("{\"index\":{\"_index\":\"" + indexName + "\"}}\n"); + bulk.append("{\"user_id\":\"") + .append("user_") + .append(user26) + .append("\",\"business_id\":\"") + .append("business_") + .append(business) + .append("\",\"stars\":") + .append(stars) + .append(",\"location\":\"") + .append(location) + .append("\",\"timestamp\":") + .append(dateStamp) + .append("}\n"); + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + + waitForDataFrameCheckpoint(transformId, 2); + + stopDataFrameTransform(transformId, false); + refreshIndex(dataFrameIndex); + + // assert that other users are unchanged + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_0", 3.776978417); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_5", 3.72); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_11", 3.846153846); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_20", 3.769230769); + + + Map user26searchResult = getAsMap(dataFrameIndex + "/_search?q=reviewer:user_26"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", user26searchResult)); + double actual = (Double) ((List) 
XContentMapValues.extractValue("hits.hits._source.avg_rating", user26searchResult)) + .get(0); + assertThat(actual, greaterThan(3.92)); + + Map user42searchResult = getAsMap(dataFrameIndex + "/_search?q=reviewer:user_42"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", user42searchResult)); + actual = (Double) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", user42searchResult)) + .get(0); + assertThat(actual, greaterThan(0.0)); + assertThat(actual, lessThan(5.0)); + } + public void testHistogramPivot() throws Exception { String transformId = "simple_histogram_pivot"; String dataFrameIndex = "pivot_reviews_via_histogram"; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 30435a8490328..c8d7bf28842e3 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -51,10 +51,7 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE_SUPER_USER).build(); } - /** - * Create a simple dataset for testing with reviewers, ratings and businesses - */ - protected void createReviewsIndex() throws IOException { + protected void createReviewsIndex(String indexName) throws IOException { int[] distributionTable = {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 3, 3, 2, 1, 1, 1}; final int numDocs = 1000; @@ -65,27 +62,27 @@ protected void createReviewsIndex() throws IOException { { builder.startObject("mappings") .startObject("properties") - .startObject("timestamp") - .field("type", "date") - .endObject() - .startObject("user_id") - .field("type", "keyword") - .endObject() - .startObject("business_id") - .field("type", "keyword") - .endObject() - .startObject("stars") - .field("type", "integer") - .endObject() - .startObject("location") - .field("type", "geo_point") - .endObject() + .startObject("timestamp") + .field("type", "date") + .endObject() + .startObject("user_id") + .field("type", "keyword") + .endObject() + .startObject("business_id") + .field("type", "keyword") + .endObject() + .startObject("stars") + .field("type", "integer") + .endObject() + .startObject("location") + .field("type", "geo_point") + .endObject() .endObject() - .endObject(); + .endObject(); } builder.endObject(); final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - Request req = new Request("PUT", REVIEWS_INDEX_NAME); + Request req = new Request("PUT", indexName); req.setEntity(entity); client().performRequest(req); } @@ -96,7 +93,7 @@ protected void createReviewsIndex() throws IOException { int hour = 10; int min = 10; for (int i = 0; i < numDocs; i++) { - bulk.append("{\"index\":{\"_index\":\"" + REVIEWS_INDEX_NAME + "\"}}\n"); + bulk.append("{\"index\":{\"_index\":\"" + indexName + "\"}}\n"); long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); int stars = distributionTable[(i * 33) % distributionTable.length]; long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); @@ -142,6 +139,12 @@ protected void 
createReviewsIndex() throws IOException { bulkRequest.setJsonEntity(bulk.toString()); client().performRequest(bulkRequest); } + /** + * Create a simple dataset for testing with reviewers, ratings and businesses + */ + protected void createReviewsIndex() throws IOException { + createReviewsIndex(REVIEWS_INDEX_NAME); + } protected void createPivotReviewsTransform(String transformId, String dataFrameIndex, String query) throws IOException { createPivotReviewsTransform(transformId, dataFrameIndex, query, null); @@ -152,6 +155,32 @@ protected void createPivotReviewsTransform(String transformId, String dataFrameI createPivotReviewsTransform(transformId, dataFrameIndex, query, pipeline, null); } + protected void createContinuousPivotReviewsTransform(String transformId, String dataFrameIndex, String authHeader) throws IOException { + + final Request createDataframeTransformRequest = createRequestWithAuth("PUT", DATAFRAME_ENDPOINT + transformId, authHeader); + + String config = "{ \"dest\": {\"index\":\"" + dataFrameIndex + "\"}," + + " \"source\": {\"index\":\"" + REVIEWS_INDEX_NAME + "\"}," + + " \"sync\": {\"time\":{\"field\": \"timestamp\", \"delay\": \"15m\"}}," + + " \"pivot\": {" + + " \"group_by\": {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + } + protected void createPivotReviewsTransform(String transformId, String dataFrameIndex, String query, String pipeline, String authHeader) throws IOException { @@ -230,11 +259,28 @@ protected void startAndWaitForTransform(String transformId, String dataFrameInde // wait until the dataframe has been created and all data is available waitForDataFrameCheckpoint(transformId); - // TODO: assuming non-continuous data frames, so transform should auto-stop waitForDataFrameStopped(transformId); refreshIndex(dataFrameIndex); } + protected void startAndWaitForContinuousTransform(String transformId, + String dataFrameIndex, + String authHeader) throws Exception { + startAndWaitForContinuousTransform(transformId, dataFrameIndex, authHeader, 1L); + } + + protected void startAndWaitForContinuousTransform(String transformId, + String dataFrameIndex, + String authHeader, + long checkpoint) throws Exception { + // start the transform + startDataframeTransform(transformId, false, authHeader, new String[0]); + assertTrue(indexExists(dataFrameIndex)); + // wait until the dataframe has been created and all data is available + waitForDataFrameCheckpoint(transformId, checkpoint); + refreshIndex(dataFrameIndex); + } + protected Request createRequestWithAuth(final String method, final String endpoint, final String authHeader) { final Request request = new Request(method, endpoint); @@ -255,10 +301,11 @@ void waitForDataFrameStopped(String transformId) throws Exception { } void waitForDataFrameCheckpoint(String transformId) throws Exception { - assertBusy(() -> { - long checkpoint = getDataFrameCheckpoint(transformId); - assertEquals(1, checkpoint); - }, 30, TimeUnit.SECONDS); + waitForDataFrameCheckpoint(transformId, 1L); + } + + void waitForDataFrameCheckpoint(String transformId, long checkpoint) throws Exception { + assertBusy(() -> 
assertEquals(checkpoint, getDataFrameCheckpoint(transformId)), 30, TimeUnit.SECONDS); } void refreshIndex(String index) throws IOException { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index b39ee6b1e6943..6475e122c0c54 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.junit.Before; @@ -42,10 +43,11 @@ public void testUsage() throws Exception { // create transforms createPivotReviewsTransform("test_usage", "pivot_reviews", null); createPivotReviewsTransform("test_usage_no_stats", "pivot_reviews_no_stats", null); + createContinuousPivotReviewsTransform("test_usage_continuous", "pivot_reviews_continuous", null); usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); usageAsMap = entityAsMap(usageResponse); - assertEquals(2, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); - assertEquals(2, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); startAndWaitForTransform("test_usage", "pivot_reviews"); stopDataFrameTransform("test_usage", false); @@ -60,6 +62,8 @@ public void testUsage() throws Exception { assertEquals(1, XContentMapValues.extractValue("hits.total.value", hasStatsMap)); }); + startAndWaitForContinuousTransform("test_usage_continuous", "pivot_reviews_continuous", null); + Request getRequest = new Request("GET", DATAFRAME_ENDPOINT + "test_usage/_stats"); Map stats = entityAsMap(client().performRequest(getRequest)); Map expectedStats = new HashMap<>(); @@ -75,12 +79,25 @@ public void testUsage() throws Exception { usageAsMap = entityAsMap(usageResponse); // we should see some stats - assertEquals(2, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); - // TODO: Adjust when continuous is supported + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); assertEquals(2, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); + assertEquals(1, XContentMapValues.extractValue("data_frame.transforms.started", usageAsMap)); for(String statName : PROVIDED_STATS) { + if (statName.equals(DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName()) + ||statName.equals(DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName())) { + continue; + } assertEquals("Incorrect stat " + statName, - expectedStats.get(statName), XContentMapValues.extractValue("data_frame.stats." + statName, usageAsMap)); + expectedStats.get(statName) * 2, + XContentMapValues.extractValue("data_frame.stats." 
+ statName, usageAsMap)); } + + stopDataFrameTransform("test_usage_continuous", false); + + usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); + usageAsMap = entityAsMap(usageResponse); + + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 5d17480d7fdf6..56878c4a2c4bf 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -606,8 +606,9 @@ protected void doSaveState(IndexerState indexerState, Map positi DataFrameTransformTaskState taskState = transformTask.taskState.get(); - // TODO: check whether continuous data frames is enabled when available - if (indexerState.equals(IndexerState.STARTED) && transformTask.currentCheckpoint.get() == 1) { + if (indexerState.equals(IndexerState.STARTED) + && transformTask.currentCheckpoint.get() == 1 + && this.isContinuous() == false) { // set both to stopped so they are persisted as such taskState = DataFrameTransformTaskState.STOPPED; indexerState = IndexerState.STOPPED; diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml index e45dc2adf5e8d..307ecda231b16 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml @@ -191,6 +191,40 @@ setup: - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } --- +"Test PUT continuous transform": + - do: + data_frame.put_data_frame_transform: + transform_id: "airline-transform-continuous" + body: > + { + "source": { + "index": "airline-data" + }, + "dest": { "index": "airline-data-by-airline-continuous" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + }, + "sync": { + "time": { + "field": "time", + "delay": "90m" + } + } + } + - match: { acknowledged: true } + - do: + data_frame.get_data_frame_transform: + transform_id: "airline-transform-continuous" + - match: { count: 1 } + - match: { transforms.0.id: "airline-transform-continuous" } + - match: { transforms.0.source.index.0: "airline-data" } + - match: { transforms.0.dest.index: "airline-data-by-airline-continuous" } + - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } + - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } + - match: { transforms.0.sync.time.field: "time" } + - match: { transforms.0.sync.time.delay: "90m" } +--- "Test transform with invalid page parameter": - do: catch: /Param \[size\] has a max acceptable value of \[1000\]/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index 9fb9227e05331..e4ff3c813ce0f 100644 --- 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -129,7 +129,70 @@ teardown: - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } +--- +"Test start/stop/start continuous transform": + - do: + data_frame.put_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-data-by-airline-start-stop-cont" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + } + } + - do: + data_frame.start_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" + - match: { acknowledged: true } + + - do: + data_frame.get_data_frame_transform_stats: + transform_id: "airline-transform-start-stop-continuous" + - match: { count: 1 } + - match: { transforms.0.id: "airline-transform-start-stop-continuous" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } + - match: { transforms.0.state.task_state: "started" } + + - do: + data_frame.stop_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" + wait_for_completion: true + - match: { acknowledged: true } + + - do: + data_frame.get_data_frame_transform_stats: + transform_id: "airline-transform-start-stop-continuous" + - match: { count: 1 } + - match: { transforms.0.id: "airline-transform-start-stop-continuous" } + - match: { transforms.0.state.indexer_state: "stopped" } + - match: { transforms.0.state.task_state: "stopped" } + + - do: + data_frame.start_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" + - match: { acknowledged: true } + + - do: + data_frame.get_data_frame_transform_stats: + transform_id: "airline-transform-start-stop-continuous" + - match: { count: 1 } + - match: { transforms.0.id: "airline-transform-start-stop-continuous" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } + - match: { transforms.0.state.task_state: "started" } + + - do: + data_frame.stop_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" + wait_for_completion: true + - match: { acknowledged: true } + + - do: + data_frame.delete_data_frame_transform: + transform_id: "airline-transform-start-stop-continuous" --- "Test stop missing transform": - do: From 1594892312ed73f72e695692fc87b94f00f77f85 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 26 Jun 2019 13:44:33 -0400 Subject: [PATCH 025/140] [DOCS] Change 'X-Pack APIs' section to 'REST APIs' (#43451) --- .../java/org/elasticsearch/client/XPackClient.java | 4 ++-- docs/reference/index.asciidoc | 4 ++-- docs/reference/redirects.asciidoc | 7 ++++++- docs/reference/rest-api/index.asciidoc | 13 ++++++++----- 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index a20dfd1ba328a..69cdd329e3984 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -35,8 +35,8 @@ * default distribution of Elasticsearch. 
All of these APIs will 404 if run * against the OSS distribution of Elasticsearch. *

    - * See the - * X-Pack APIs on elastic.co for more information. + * See the + * REST APIs on elastic.co for more information. */ public final class XPackClient { diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 7bb29cb06730c..f9d4b5a4e0528 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -65,8 +65,6 @@ include::frozen-indices.asciidoc[] include::administering.asciidoc[] -include::rest-api/index.asciidoc[] - include::commands/index.asciidoc[] :edit_url: @@ -76,6 +74,8 @@ include::testing.asciidoc[] include::glossary.asciidoc[] +include::rest-api/index.asciidoc[] + include::release-notes/highlights.asciidoc[] include::migration/index.asciidoc[] diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 579378b5e28fd..4d15ee7e25503 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -615,4 +615,9 @@ the steps needed to migrate. The `common` terms query is deprecated. Use the <> instead. The `match` query skips blocks of documents efficiently, -without any configuration, if the total number of hits is not tracked. \ No newline at end of file +without any configuration, if the total number of hits is not tracked. + +[role="exclude",id="xpack-api"] +=== X-Pack APIs + +{es} {xpack} APIs are now documented in <>. \ No newline at end of file diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index a5b93747dfa1a..dc73ac134d566 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -1,11 +1,14 @@ -[role="xpack"] -[[xpack-api]] -= {xpack} APIs +[[rest-apis]] += REST APIs [partintro] -- -{xpack} exposes REST APIs that are used by the UI components and can be called -directly to configure and access {xpack} features. +{es} exposes REST APIs that are used by the UI components and can be called +directly to configure and access {es} features. + +[NOTE] +We are working on including more {es} APIs in this section. Some content might +not be included yet. * <> * <> From 0d05ec46c2b850f36aad05dc11d4df9eb9151cb0 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 26 Jun 2019 13:46:21 -0700 Subject: [PATCH 026/140] [DOCS] Updates data frame APIs to use API template (#43610) --- .../apis/delete-transform.asciidoc | 35 ++++---- .../apis/get-transform-stats.asciidoc | 61 ++++++++------ .../data-frames/apis/get-transform.asciidoc | 59 ++++++++------ .../apis/preview-transform.asciidoc | 34 ++++---- .../data-frames/apis/put-transform.asciidoc | 69 +++++++++------- .../data-frames/apis/start-transform.asciidoc | 31 +++---- .../data-frames/apis/stop-transform.asciidoc | 81 +++++++++++-------- 7 files changed, 214 insertions(+), 156 deletions(-) diff --git a/docs/reference/data-frames/apis/delete-transform.asciidoc b/docs/reference/data-frames/apis/delete-transform.asciidoc index 77c74b95c438c..7cc911e91acb1 100644 --- a/docs/reference/data-frames/apis/delete-transform.asciidoc +++ b/docs/reference/data-frames/apis/delete-transform.asciidoc @@ -12,21 +12,15 @@ beta[] Deletes an existing {dataframe-transform}. - -==== Request +[discrete] +[[delete-data-frame-transform-request]] +==== {api-request-title} `DELETE _data_frame/transforms/` -==== Description - -NOTE: Before you can delete the {dataframe-transform}, you must stop it. - -==== Path Parameters - -`data_frame_transform_id` (required):: - (string) Identifier for the {dataframe-transform}. 
- -==== Authorization +[discrete] +[[delete-data-frame-transform-prereqs]] +==== {api-prereq-title} If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in @@ -34,9 +28,22 @@ If the {es} {security-features} are enabled, you must have see {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. -==== Examples +[discrete] +[[delete-data-frame-transform-desc]] +==== {api-description-title} + +NOTE: Before you can delete the {dataframe-transform}, you must stop it. + +[discrete] +[[delete-data-frame-transform-path-parms]] +==== {api-path-parms-title} + +`` (Required):: + (string) Identifier for the {dataframe-transform}. -The following example deletes the `ecommerce_transform` {dataframe-transform}: +[discrete] +[[delete-data-frame-transform-examples]] +==== {api-example-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/get-transform-stats.asciidoc b/docs/reference/data-frames/apis/get-transform-stats.asciidoc index ff7e023d20fed..5751c8a3ea7bc 100644 --- a/docs/reference/data-frames/apis/get-transform-stats.asciidoc +++ b/docs/reference/data-frames/apis/get-transform-stats.asciidoc @@ -12,8 +12,9 @@ beta[] Retrieves usage information for {dataframe-transforms}. - -==== Request +[discrete] +[[get-data-frame-transform-stats-request]] +==== {api-request-title} `GET _data_frame/transforms//_stats` @@ -26,45 +27,57 @@ Retrieves usage information for {dataframe-transforms}. `GET _data_frame/transforms/*/_stats` + -//===== Description +[discrete] +[[get-data-frame-transform-stats-prereqs]] +==== {api-prereq-title} -==== Path Parameters +If the {es} {security-features} are enabled, you must have +`monitor_data_frame_transforms` cluster privileges to use this API. The built-in +`data_frame_transforms_user` role has these privileges. For more information, +see {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + +//[discrete] +//[[get-data-frame-transform-stats-desc]] +//===== {api-description-title} -`data_frame_transform_id`:: +[discrete] +[[get-data-frame-transform-stats-path-parms]] +==== {api-path-parms-title} + +`` (Optional):: (string) Identifier for the {dataframe-transform}. It can be a {dataframe-transform} identifier or a wildcard expression. If you do not specify one of these options, the API returns information for all {dataframe-transforms}. + +[discrete] +[[get-data-frame-transform-stats-query-parms]] +==== {api-query-parms-title} -==== Query Parameters +`allow_no_match` (Optional):: + (boolean) Whether to ignore if a wildcard expression matches no + {dataframe-transforms}. This includes `_all` string or when no transforms have + been specified. The default is `true`. -`from`:: +`from` (Optional):: (integer) Skips the specified number of {dataframe-transforms}. The default value is `0`. -`size`:: - (integer) Specifies the maximum number of {dataframe-transforms} to obtain. The default value is `100`. - -`allow_no_match`:: - (boolean) Whether to ignore if a wildcard expression matches no data frame transforms. - This includes `_all` string or when no transforms have been specified. The default is `true`. +`size` (Optional):: + (integer) Specifies the maximum number of {dataframe-transforms} to obtain. + The default value is `100`. 
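
For example, a wildcard request such as the following (the `ecommerce*`
pattern is only illustrative) returns an empty transform list rather than an
error when nothing matches, because `allow_no_match` defaults to `true`:

[source,js]
--------------------------------------------------
GET _data_frame/transforms/ecommerce*/_stats
--------------------------------------------------
// CONSOLE
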
-==== Results - -The API returns the following information: +[discrete] +[[get-data-frame-transform-stats-response]] +==== {api-response-body-title} `transforms`:: (array) An array of statistics objects for {dataframe-transforms}, which are sorted by the `id` value in ascending order. -==== Authorization - -If the {es} {security-features} are enabled, you must have -`monitor_data_frame_transforms` cluster privileges to use this API. The built-in -`data_frame_transforms_user` role has these privileges. For more information, -see {stack-ov}/security-privileges.html[Security privileges] and -{stack-ov}/built-in-roles.html[Built-in roles]. - +[discrete] +[[get-data-frame-transform-stats-example]] ==== Examples The following example skips for the first five {dataframe-transforms} and diff --git a/docs/reference/data-frames/apis/get-transform.asciidoc b/docs/reference/data-frames/apis/get-transform.asciidoc index 7a9e86cb520ed..9dd0ff092d754 100644 --- a/docs/reference/data-frames/apis/get-transform.asciidoc +++ b/docs/reference/data-frames/apis/get-transform.asciidoc @@ -12,8 +12,9 @@ beta[] Retrieves configuration information for {dataframe-transforms}. - -==== Request +[discrete] +[[get-data-frame-transform-request]] +==== {api-request-title} `GET _data_frame/transforms/` + @@ -25,46 +26,54 @@ Retrieves configuration information for {dataframe-transforms}. `GET _data_frame/transforms/*` -//===== Description +[discrete] +[[get-data-frame-transform-prereqs]] +==== {api-prereq-title} + +If the {es} {security-features} are enabled, you must have +`monitor_data_frame_transforms` cluster privileges to use this API. The built-in +`data_frame_transforms_user` role has these privileges. For more information, +see {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. -==== Path Parameters +[discrete] +[[get-data-frame-transform-path-parms]] +==== {api-path-parms-title} -`data_frame_transform_id`:: +`` (Optional):: (string) Identifier for the {dataframe-transform}. It can be a {dataframe-transform} identifier or a wildcard expression. If you do not specify one of these options, the API returns information for all {dataframe-transforms}. + +[discrete] +[[get-data-frame-transform-query-parms]] +==== {api-query-parms-title} -==== Query Parameters +`allow_no_match` (Optional):: + (boolean) Whether to ignore if a wildcard expression matches no + {dataframe-transforms}. This includes `_all` string or when no transforms have + been specified. The default is `true`. -`from`:: +`from` (Optional):: (integer) Skips the specified number of {dataframe-transforms}. The default value is `0`. -`size`:: - (integer) Specifies the maximum number of {dataframe-transforms} to obtain. The default value is `100`. - -`allow_no_match`:: - (boolean) Whether to ignore if a wildcard expression matches no data frame transforms. - This includes `_all` string or when no transforms have been specified. The default is `true`. +`size` (Optional):: + (integer) Specifies the maximum number of {dataframe-transforms} to obtain. + The default value is `100`. -==== Results - -The API returns the following information: +[discrete] +[[get-data-frame-transform-response]] +==== {api-response-body-title} `transforms`:: (array) An array of transform resources, which are sorted by the `id` value in ascending order. -==== Authorization - -If the {es} {security-features} are enabled, you must have -`monitor_data_frame_transforms` cluster privileges to use this API. 
The built-in -`data_frame_transforms_user` role has these privileges. For more information, -see {stack-ov}/security-privileges.html[Security privileges] and -{stack-ov}/built-in-roles.html[Built-in roles]. - -==== Examples +[discrete] +[[get-data-frame-transform-example]] +==== {api-example-title} The following example retrieves information about a maximum of ten transforms: diff --git a/docs/reference/data-frames/apis/preview-transform.asciidoc b/docs/reference/data-frames/apis/preview-transform.asciidoc index dd8fb6d74d379..d4f2a9e6a12da 100644 --- a/docs/reference/data-frames/apis/preview-transform.asciidoc +++ b/docs/reference/data-frames/apis/preview-transform.asciidoc @@ -12,22 +12,15 @@ beta[] Previews a {dataframe-transform}. - -==== Request +[discrete] +[[preview-data-frame-transform-request]] +==== {api-request-title} `POST _data_frame/transforms/_preview` -//==== Description -//==== Path Parameters - -==== Request Body - -`source`:: The source index or index pattern. - -`pivot`:: Defines the pivot function `group by` fields and the aggregation to -reduce the data. - -==== Authorization +[discrete] +[[preview-data-frame-transform-prereq]] +==== {api-prereq-title} If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in @@ -37,10 +30,19 @@ If the {es} {security-features} are enabled, you must have {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. -==== Examples +[discrete] +[[preview-data-frame-transform-request-body]] +==== {api-request-body-title} + +`source` (Required):: + (object) The source index or index pattern. + +`pivot` (Required):: + (object) Defines the pivot function `group by` fields and the aggregation to + reduce the data. See <>. -The following example obtains a preview of a {dataframe-transform} on the {kib} -eCommerce sample data: +[discrete] +==== {api-example-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/put-transform.asciidoc b/docs/reference/data-frames/apis/put-transform.asciidoc index 93ce6db6df3ab..a24cc7d224531 100644 --- a/docs/reference/data-frames/apis/put-transform.asciidoc +++ b/docs/reference/data-frames/apis/put-transform.asciidoc @@ -12,12 +12,28 @@ beta[] Instantiates a {dataframe-transform}. - -==== Request +[discrete] +[[put-data-frame-transform-request]] +==== {api-request-title} `PUT _data_frame/transforms/` -===== Description +[discrete] +[[put-data-frame-transform-prereqs]] +==== {api-prereq-title} + +If the {es} {security-features} are enabled, you must have +`manage_data_frame_transforms` cluster privileges to use this API. The built-in +`data_frame_transforms_admin` role has these privileges. You must also +have `read` and `view_index_metadata` privileges on the source index and `read`, +`create_index`, and `index` privileges on the destination index. For more +information, see {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + + +[discrete] +[[put-data-frame-transform-desc]] +===== {api-description-title} IMPORTANT: You must use {kib} or this API to create a {dataframe-transform}. Do not put a {dataframe-transform} directly into any @@ -25,42 +41,37 @@ IMPORTANT: You must use {kib} or this API to create a {dataframe-transform}. If {es} {security-features} are enabled, do not give users any privileges on `.data-frame-internal*` indices. 
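
As a sketch of the overall request shape, reusing the hypothetical
`airline-data` index from the REST test suite, a minimal transform supplies a
`source`, a `dest`, and a `pivot`:

[source,js]
--------------------------------------------------
PUT _data_frame/transforms/airline-transform
{
  "source": { "index": "airline-data" },
  "dest": { "index": "airline-data-by-airline" },
  "pivot": {
    "group_by": { "airline": { "terms": { "field": "airline" } } },
    "aggs": { "avg_response": { "avg": { "field": "responsetime" } } }
  }
}
--------------------------------------------------
// CONSOLE
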
-==== Path Parameters +[discrete] +[[put-data-frame-transform-path-parms]] +==== {api-path-parms-title} -`data_frame_transform_id` (required):: +`` (Required):: (string) Identifier for the {dataframe-transform}. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. +[discrete] +[[put-data-frame-transform-request-body]] +==== {api-request-body-title} -==== Request Body - -`source` (required):: (object) The source configuration, consisting of `index` and optionally -a `query`. +`description` (Optional):: + (string) Free text description of the {dataframe-transform}. -`dest` (required):: (object) The destination configuration, consisting of `index` and optionally a -`pipeline` id. +`dest` (Required):: + (object) The destination configuration, which consists of `index` and + optionally a `pipeline` id. -`pivot`:: (object) Defines the pivot function `group by` fields and the aggregation to -reduce the data. See <>. - -`description`:: Optional free text description of the data frame transform - - -==== Authorization - -If the {es} {security-features} are enabled, you must have -`manage_data_frame_transforms` cluster privileges to use this API. The built-in -`data_frame_transforms_admin` role has these privileges. You must also -have `read` and `view_index_metadata` privileges on the source index and `read`, -`create_index`, and `index` privileges on the destination index. For more -information, see {stack-ov}/security-privileges.html[Security privileges] and -{stack-ov}/built-in-roles.html[Built-in roles]. +`pivot` (Optional):: + (object) Defines the pivot function `group by` fields and the aggregation to + reduce the data. See <>. -==== Examples +`source` (Required):: + (object) The source configuration, which consists of `index` and optionally + a `query`. -The following example creates a {dataframe-transform} for the {kib} eCommerce -sample data: +[discrete] +[[put-data-frame-transform-example]] +==== {api-example-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/start-transform.asciidoc b/docs/reference/data-frames/apis/start-transform.asciidoc index 7baefb34ee313..059f8a634146f 100644 --- a/docs/reference/data-frames/apis/start-transform.asciidoc +++ b/docs/reference/data-frames/apis/start-transform.asciidoc @@ -12,21 +12,15 @@ beta[] Starts one or more {dataframe-transforms}. -==== Request +[discrete] +[[start-data-frame-transform-request]] +==== {api-request-title} `POST _data_frame/transforms//_start` -//==== Description - -==== Path Parameters - -`data_frame_transform_id` (required):: - (string) Identifier for the {dataframe-transform}. This identifier can contain - lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It - must start and end with alphanumeric characters. - -//==== Request Body -==== Authorization +[discrete] +[[start-data-frame-transform-prereqs]] +==== {api-prereq-title} If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. You must also @@ -35,9 +29,18 @@ have `view_index_metadata` privileges on the source index for the {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. -==== Examples +[discrete] +[[start-data-frame-transform-path-parms]] +==== {api-path-parms-title} + +`` (Required):: + (string) Identifier for the {dataframe-transform}. 
This identifier can contain + lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It + must start and end with alphanumeric characters. -The following example starts the `ecommerce_transform` {dataframe-transform}: +[discrete] +[[start-data-frame-transform-example]] +==== {api-example-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/stop-transform.asciidoc b/docs/reference/data-frames/apis/stop-transform.asciidoc index 3620c24a88d3d..4ade2706ab2b6 100644 --- a/docs/reference/data-frames/apis/stop-transform.asciidoc +++ b/docs/reference/data-frames/apis/stop-transform.asciidoc @@ -12,7 +12,9 @@ beta[] Stops one or more {dataframe-transforms}. -==== Request +[discrete] +[[stop-data-frame-transform-request]] +==== {api-request-title} `POST _data_frame/transforms//_stop` + @@ -20,48 +22,59 @@ Stops one or more {dataframe-transforms}. `POST _data_frame/transforms/_all/_stop` +[discrete] +[[stop-data-frame-transform-prereq]] +==== {api-prereq-title} + +If the {es} {security-features} are enabled, you must have +`manage_data_frame_transforms` cluster privileges to use this API. The built-in +`data_frame_transforms_admin` role has these privileges. For more information, +see {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + +[discrete] +[[stop-data-frame-transform-desc]] +==== {api-description-title} -==== Description You can stop multiple {dataframe-transforms} in a single API request by using a comma-separated list of {dataframe-transforms} or a wildcard expression. -All {dataframe-transforms} can be stopped by using `_all` or `*` as the ``. +All {dataframe-transforms} can be stopped by using `_all` or `*` as the +``. -==== Path Parameters +[discrete] +[[stop-data-frame-transform-path-parms]] +==== {api-path-parms-title} -`data_frame_transform_id` (required):: +`` (Required):: (string) Identifier for the {dataframe-transform}. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. -==== Query Parameters - -`wait_for_completion`:: - (boolean) If set to true, causes the API to block until the indexer state completely stops. If set to false, the API returns immediately and the indexer will be stopped asynchronously in the background. Defaults to `false`. - - `timeout`:: - (time value) If `wait_for_completion=true`, the API blocks for (at maximum) - the specified duration while waiting for the transform to stop. If more than - `timeout` time has passed, the API throws a timeout exception. Even if a - timeout exception is thrown, the stop request is still processing and - eventually moves the transform to `STOPPED`. The timeout simply means the API - call itself timed out while waiting for the status change. Defaults to `30s` - - `allow_no_match`:: - (boolean) Whether to ignore if a wildcard expression matches no data frame transforms. - This includes `_all` string or when no transforms have been specified. The default is `true`. - -//==== Request Body -==== Authorization - -If the {es} {security-features} are enabled, you must have -`manage_data_frame_transforms` cluster privileges to use this API. The built-in -`data_frame_transforms_admin` role has these privileges. For more information, -see {stack-ov}/security-privileges.html[Security privileges] and -{stack-ov}/built-in-roles.html[Built-in roles]. 
- -==== Examples - -The following example stops the `ecommerce_transform` {dataframe-transform}: +[discrete] +[[stop-data-frame-transform-query-parms]] +==== {api-query-parms-title} + +`allow_no_match` (Optional):: + (boolean) Whether to ignore if a wildcard expression matches no + {dataframe-transforms}. This includes `_all` string or when no transforms have + been specified. The default is `true`. + +`timeout` (Optional):: + (time value) If `wait_for_completion=true`, the API blocks for (at maximum) + the specified duration while waiting for the transform to stop. If more than + `timeout` time has passed, the API throws a timeout exception. Even if a + timeout exception is thrown, the stop request is still processing and + eventually moves the transform to `STOPPED`. The timeout simply means the API + call itself timed out while waiting for the status change. Defaults to `30s` + +`wait_for_completion` (Optional):: + (boolean) If set to `true`, causes the API to block until the indexer state + completely stops. If set to `false`, the API returns immediately and the + indexer will be stopped asynchronously in the background. Defaults to `false`. + +[discrete] +[[stop-data-frame-transform-example]] +==== {api-example-title} [source,js] -------------------------------------------------- From 80fbe9c7c29b008ae8a3472f70661e2c73326eb1 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 26 Jun 2019 14:24:36 -0700 Subject: [PATCH 027/140] Remove compile-time dependency on test fixtures (#43651) --- x-pack/qa/kerberos-tests/build.gradle | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 64029c34724af..3095bb6364ffa 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -45,6 +45,12 @@ testClusters.integTest { user username: "test_kibana_user", password: "x-pack-test-password", role: "kibana_system" } +task copyKeytabToGeneratedResources(type: Copy) { + from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") + into "$buildDir/generated-resources/keytabs" + dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture +} + String realm = "BUILD.ELASTIC.CO" integTest.runner { Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") @@ -57,12 +63,5 @@ integTest.runner { "-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}", "-Dsun.security.krb5.debug=true" ]) + classpath += copyKeytabToGeneratedResources.outputs.files } - -def generatedResources = "$buildDir/generated-resources/keytabs" -task copyKeytabToGeneratedResources(type: Copy) { - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") - into generatedResources - dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture -} -project.sourceSets.test.output.dir(generatedResources, builtBy:copyKeytabToGeneratedResources) From f6bc4b12999eecb7d77d6062490cbdcf754f8ac4 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 27 Jun 2019 06:55:47 +0300 Subject: [PATCH 028/140] Mute failing test Tracked in #43670 --- .../xpack/ml/integration/MlDistributedFailureIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 
297e45cd3caaa..711c9687144ca 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java
@@ -116,6 +116,7 @@ public void testFullClusterRestart() throws Exception {
         });
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/43670")
     public void testCloseUnassignedJobAndDatafeed() throws Exception {
         internalCluster().ensureAtMostNumDataNodes(0);
         logger.info("Starting dedicated master node...");
From 98ed5e985f6426154118774a5366b23a996884a3 Mon Sep 17 00:00:00 2001
From: Julie Tibshirani
Date: Thu, 27 Jun 2019 08:27:01 +0300
Subject: [PATCH 029/140] Make the ignore_above docs tests more robust. (#43349)

It is possible for internal ML indices like `.data-frame-notifications-1` to
leak, causing other docs tests to fail when they accidentally search over these
indices. This PR updates the ignore_above tests to only search a specific
index.
---
 docs/reference/mapping/params/ignore-above.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/mapping/params/ignore-above.asciidoc b/docs/reference/mapping/params/ignore-above.asciidoc
index daf5c92bcf34d..33c0eaf339f24 100644
--- a/docs/reference/mapping/params/ignore-above.asciidoc
+++ b/docs/reference/mapping/params/ignore-above.asciidoc
@@ -30,7 +30,7 @@ PUT my_index/_doc/2 <3>
   "message": "Syntax error with some long stacktrace"
 }
 
-GET _search <4>
+GET my_index/_search <4>
 {
   "aggs": {
     "messages": {
From d2c696d54b44ab66b850d30b0f68ad8e725dbbcc Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Thu, 27 Jun 2019 08:56:26 +0100
Subject: [PATCH 030/140] Require [articles] setting in elision filter (#43083)

We should throw an exception at construction time if a list of articles is
not provided, otherwise we can get random NPEs during indexing.

Relates to #43002
---
 .../tokenfilters/elision-tokenfilter.asciidoc |  5 ++-
 .../analysis/common/CommonAnalysisPlugin.java |  2 +-
 .../common/ElisionTokenFilterFactory.java     |  3 ++
 .../common/ElisionFilterFactoryTests.java     | 43 +++++++++++++++++++
 .../test/analysis-common/40_token_filters.yml | 14 ++++++
 5 files changed, 64 insertions(+), 3 deletions(-)
 create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ElisionFilterFactoryTests.java

diff --git a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
index 2ff19cebe893e..34646a0413e36 100644
--- a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
@@ -4,8 +4,9 @@
 A token filter which removes elisions. For example, "l'avion" (the plane) will
 be tokenized as "avion" (plane).
 
-Accepts `articles` parameter which is a set of stop words articles. Also accepts
-`articles_case`, which indicates whether the filter treats those articles as
+Requires either an `articles` parameter which is a set of stop word articles, or
+`articles_path` which points to a text file containing the stop set. Also optionally
+accepts `articles_case`, which indicates whether the filter treats those articles as
 case sensitive. 
For example: diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index f095b766ee1d5..ca53cb8bf3953 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -237,7 +237,7 @@ public Map> getTokenFilters() { filters.put("dutch_stem", DutchStemTokenFilterFactory::new); filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new); filters.put("edgeNGram", EdgeNGramTokenFilterFactory::new); - filters.put("elision", ElisionTokenFilterFactory::new); + filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new)); filters.put("fingerprint", FingerprintTokenFilterFactory::new); filters.put("flatten_graph", FlattenGraphTokenFilterFactory::new); filters.put("french_stem", FrenchStemTokenFilterFactory::new); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ElisionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ElisionTokenFilterFactory.java index 52cb69952b836..39d042caa8c25 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ElisionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ElisionTokenFilterFactory.java @@ -36,6 +36,9 @@ public class ElisionTokenFilterFactory extends AbstractTokenFilterFactory implem ElisionTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); this.articles = Analysis.parseArticles(env, settings); + if (this.articles == null) { + throw new IllegalArgumentException("elision filter requires [articles] or [articles_path] setting"); + } } @Override diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ElisionFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ElisionFilterFactoryTests.java new file mode 100644 index 0000000000000..dbfd49d5649d5 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ElisionFilterFactoryTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.analysis.AnalysisTestsHelper; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; + +public class ElisionFilterFactoryTests extends ESTokenStreamTestCase { + + public void testElisionFilterWithNoArticles() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.filter.elision.type", "elision") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); + + assertEquals("elision filter requires [articles] or [articles_path] setting", e.getMessage()); + } + +} diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 3486b9defd9d2..92d0dce7b6201 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -587,6 +587,20 @@ - length: { tokens: 1 } - match: { tokens.0.token: avion } + - do: + catch: bad_request + indices.create: + index: test2 + body: + settings: + analysis: + filter: + my_elision: + type: elision + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "elision filter requires [articles] or [articles_path] setting" } + --- "stemmer": - do: From fbefb4690ef804596d965959d6aa50c16aea7dc3 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 27 Jun 2019 09:01:53 +0100 Subject: [PATCH 031/140] Use preconfigured filters correctly in Analyze API (#43568) When a named token filter or char filter is passed as part of an Analyze API request with no index, we currently try and build the relevant filter using no index settings. However, this can miss cases where there is a pre-configured filter defined in the analysis registry. One example here is the elision filter, which has a pre-configured version built with the french elision set; when used as part of normal analysis, this preconfigured set is used, but when used as part of the Analyze API we end up with NPEs because it tries to instantiate the filter with no index settings. This commit changes the Analyze API to check for pre-configured filters in the case that the request has no index defined, and is using a name rather than a custom definition for a filter. 
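
For example, an analyze request that names the filter but no index, along the
lines of

    GET _analyze
    {
      "tokenizer": "standard",
      "filter": ["elision"],
      "text": "l'avion"
    }

now resolves the pre-configured french elision set instead of hitting an NPE
while building the filter from empty settings (the request body above is only
a sketch of the scenario).
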
It also changes the pre-configured `word_delimiter_graph` filter and `edge_ngram` tokenizer to make their settings consistent with the defaults used when creating them with no settings Closes #43002 Closes #43621 Closes #43582 --- .../analysis/common/CommonAnalysisPlugin.java | 26 +++-- .../common/EdgeNGramTokenizerTests.java | 98 +++++++++++++++++++ ...DelimiterGraphTokenFilterFactoryTests.java | 57 +++++++++++ .../index/analysis/AnalysisRegistry.java | 20 ++-- .../indices/TransportAnalyzeActionTests.java | 44 ++++----- 5 files changed, 208 insertions(+), 37 deletions(-) create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index ca53cb8bf3953..5ecd491fef34a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -83,6 +83,7 @@ import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter; +import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenFilter; @@ -110,6 +111,7 @@ import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -455,13 +457,15 @@ public List getPreConfiguredTokenFilters() { | WordDelimiterFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterFilter.SPLIT_ON_NUMERICS | WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null))); - filters.add(PreConfiguredTokenFilter.singleton("word_delimiter_graph", false, false, input -> - new WordDelimiterGraphFilter(input, + filters.add(PreConfiguredTokenFilter.singletonWithVersion("word_delimiter_graph", false, false, (input, version) -> { + boolean adjustOffsets = version.onOrAfter(Version.V_7_3_0); + return new WordDelimiterGraphFilter(input, adjustOffsets, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS - | WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE, null))); + | WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE, null); + })); return filters; } @@ -475,8 +479,12 @@ public List getPreConfiguredTokenizers() { tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new)); - tokenizers.add(PreConfiguredTokenizer.singleton("edge_ngram", - () -> new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE))); + tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("edge_ngram", 
(version) -> { + if (version.onOrAfter(Version.V_7_3_0)) { + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); + } + return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + })); tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1))); tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new)); // TODO deprecate and remove in API @@ -485,8 +493,12 @@ public List getPreConfiguredTokenizers() { // Temporary shim for aliases. TODO deprecate after they are moved tokenizers.add(PreConfiguredTokenizer.singleton("nGram", NGramTokenizer::new)); - tokenizers.add(PreConfiguredTokenizer.singleton("edgeNGram", - () -> new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE))); + tokenizers.add(PreConfiguredTokenizer.elasticsearchVersion("edgeNGram", (version) -> { + if (version.onOrAfter(Version.V_7_3_0)) { + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); + } + return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + })); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); return tokenizers; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java new file mode 100644 index 0000000000000..0172f7cbc2657 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; +import java.util.Collections; + +public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase { + + private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws IOException { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + return new AnalysisModule(TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + } + + public void testPreConfiguredTokenizer() throws IOException { + + // Before 7.3 we return ngrams of length 1 only + { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, + VersionUtils.getPreviousVersion(Version.V_7_3_0)); + try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) { + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "test", new String[]{"t"}); + } + } + + // Check deprecated name as well + { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, + VersionUtils.getPreviousVersion(Version.V_7_3_0)); + try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) { + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "test", new String[]{"t"}); + } + } + + // Afterwards, we return ngrams of length 1 and 2, to match the default factory settings + { + try (IndexAnalyzers indexAnalyzers = buildAnalyzers(Version.CURRENT, "edge_ngram")) { + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "test", new String[]{"t", "te"}); + } + } + + // Check deprecated name as well + { + try (IndexAnalyzers indexAnalyzers = buildAnalyzers(Version.CURRENT, "edgeNGram")) { + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "test", new String[]{"t", "te"}); + + } + } + + } + +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java index d799674f231a1..c8e3699ea840d 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java @@ 
-20,14 +20,24 @@
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisTestsHelper;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.io.StringReader;
+import java.util.Collections;
 
 public class WordDelimiterGraphTokenFilterFactoryTests extends BaseWordDelimiterTokenFilterFactoryTestCase {
@@ -107,4 +117,51 @@ public void testAdjustingOffsets() throws IOException {
         assertTokenStreamContents(tokenFilter.create(tokenizer), expected, expectedStartOffsets, expectedEndOffsets, null,
             expectedIncr, expectedPosLen, null);
     }
+
+    public void testPreconfiguredFilter() throws IOException {
+        // Before 7.3 we don't adjust offsets
+        {
+            Settings settings = Settings.builder()
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+                .build();
+            Settings indexSettings = Settings.builder()
+                .put(IndexMetaData.SETTING_VERSION_CREATED,
+                    VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_3_0)))
+                .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
+                .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
+                .build();
+            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+
+            try (IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings),
+                Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings)) {
+
+                NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
+                assertNotNull(analyzer);
+                assertAnalyzesTo(analyzer, "h100", new String[]{"h", "100"}, new int[]{ 0, 0 }, new int[]{ 4, 4 });
+
+            }
+        }
+
+        // After 7.3 we do adjust offsets
+        {
+            Settings settings = Settings.builder()
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+                .build();
+            Settings indexSettings = Settings.builder()
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+                .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
+                .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
+                .build();
+            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+
+            try (IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings),
+                Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings)) {
+
+                NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
+                assertNotNull(analyzer);
+                assertAnalyzesTo(analyzer, "h100", new String[]{"h", "100"}, new int[]{ 0, 1 }, new int[]{ 1, 4 });
+
+            }
+        }
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
index b198a66d24a49..496b1eb3bfaea 100644
--- 
a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -111,6 +111,7 @@ private static Settings getSettingsFromIndexSettings(IndexSettings indexSettings private T getComponentFactory(IndexSettings settings, NameOrDefinition nod, String componentType, Function> globalComponentProvider, + Function> prebuiltComponentProvider, BiFunction> indexComponentProvider) throws IOException { if (nod.definition != null) { // custom component, so we build it from scratch @@ -128,10 +129,14 @@ private T getComponentFactory(IndexSettings settings, NameOrDefinition nod, return factory.get(settings, environment, "__anonymous__" + type, nod.definition); } if (settings == null) { - // no index provided, so we use global analysis components only - AnalysisProvider factory = globalComponentProvider.apply(nod.name); + // no index provided, so we use prebuilt analysis components + AnalysisProvider factory = prebuiltComponentProvider.apply(nod.name); if (factory == null) { - throw new IllegalArgumentException("failed to find global " + componentType + " under [" + nod.name + "]"); + // if there's no prebuilt component, try loading a global one to build with no settings + factory = globalComponentProvider.apply(nod.name); + if (factory == null) { + throw new IllegalArgumentException("failed to find global " + componentType + " under [" + nod.name + "]"); + } } return factory.get(environment, nod.name); } else { @@ -217,25 +222,26 @@ public IndexAnalyzers build(IndexSettings indexSettings) throws IOException { public NamedAnalyzer buildCustomAnalyzer(IndexSettings indexSettings, boolean normalizer, NameOrDefinition tokenizer, List charFilters, List tokenFilters) throws IOException { TokenizerFactory tokenizerFactory - = getComponentFactory(indexSettings, tokenizer, "tokenizer", this::getTokenizerProvider, this::getTokenizerProvider); + = getComponentFactory(indexSettings, tokenizer, "tokenizer", + this::getTokenizerProvider, prebuiltAnalysis::getTokenizerFactory, this::getTokenizerProvider); List charFilterFactories = new ArrayList<>(); for (NameOrDefinition nod : charFilters) { charFilterFactories.add(getComponentFactory(indexSettings, nod, "char_filter", - this::getCharFilterProvider, this::getCharFilterProvider)); + this::getCharFilterProvider, prebuiltAnalysis::getCharFilterFactory, this::getCharFilterProvider)); } List tokenFilterFactories = new ArrayList<>(); for (NameOrDefinition nod : tokenFilters) { TokenFilterFactory tff = getComponentFactory(indexSettings, nod, "filter", - this::getTokenFilterProvider, this::getTokenFilterProvider); + this::getTokenFilterProvider, prebuiltAnalysis::getTokenFilterFactory, this::getTokenFilterProvider); if (normalizer && tff instanceof NormalizingTokenFilterFactory == false) { throw new IllegalArgumentException("Custom normalizer may not use filter [" + tff.name() + "]"); } tff = tff.getChainAwareTokenFilterFactory(tokenizerFactory, charFilterFactories, tokenFilterFactories, name -> { try { return getComponentFactory(indexSettings, new NameOrDefinition(name), "filter", - this::getTokenFilterProvider, this::getTokenFilterProvider); + this::getTokenFilterProvider, prebuiltAnalysis::getTokenFilterFactory, this::getTokenFilterProvider); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 1ffd7410fa66a..72830f79889c0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -141,7 +141,7 @@ public Map> getTokenFilters() { @Override public List getPreConfiguredCharFilters() { - return singletonList(PreConfiguredCharFilter.singleton("append_foo", false, reader -> new AppendCharFilter(reader, "foo"))); + return singletonList(PreConfiguredCharFilter.singleton("append", false, reader -> new AppendCharFilter(reader, "foo"))); } }; registry = new AnalysisModule(environment, singletonList(plugin)).getAnalysisRegistry(); @@ -170,24 +170,11 @@ public void testNoIndexAnalyzers() throws IOException { List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); - // Refer to a token filter by its type so we get its default configuration - request = new AnalyzeAction.Request(); - request.text("the qu1ck brown fox"); - request.tokenizer("standard"); - request.addTokenFilter("mock"); - analyze - = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); - tokens = analyze.getTokens(); - assertEquals(3, tokens.size()); - assertEquals("qu1ck", tokens.get(0).getTerm()); - assertEquals("brown", tokens.get(1).getTerm()); - assertEquals("fox", tokens.get(2).getTerm()); - // We can refer to a pre-configured token filter by its name to get it request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); - request.addCharFilter("append_foo"); + request.addCharFilter("append"); // <-- no config, so use preconfigured filter analyze = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); tokens = analyze.getTokens(); @@ -197,35 +184,46 @@ public void testNoIndexAnalyzers() throws IOException { assertEquals("brown", tokens.get(2).getTerm()); assertEquals("foxfoo", tokens.get(3).getTerm()); - // We can refer to a token filter by its type to get its default configuration + // If the preconfigured filter doesn't exist, we use a global filter with no settings request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); - request.addCharFilter("append"); + request.addTokenFilter("mock"); // <-- not preconfigured, but a global one available + analyze + = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); + tokens = analyze.getTokens(); + assertEquals(3, tokens.size()); + assertEquals("qu1ck", tokens.get(0).getTerm()); + assertEquals("brown", tokens.get(1).getTerm()); + assertEquals("fox", tokens.get(2).getTerm()); + + // We can build a new char filter to get default values + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); + request.tokenizer("standard"); + request.addTokenFilter(Map.of("type", "mock", "stopword", "brown")); + request.addCharFilter(Map.of("type", "append")); // <-- basic config, uses defaults analyze = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); tokens = analyze.getTokens(); - assertEquals(4, tokens.size()); + assertEquals(3, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); assertEquals("qu1ck", tokens.get(1).getTerm()); - assertEquals("brown", tokens.get(2).getTerm()); - assertEquals("foxbar", tokens.get(3).getTerm()); + assertEquals("foxbar", tokens.get(2).getTerm()); // We can pass a new configuration request = new 
AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addTokenFilter(Map.of("type", "mock", "stopword", "brown")); - request.addCharFilter("append"); - request.text("the qu1ck brown fox"); + request.addCharFilter(Map.of("type", "append", "suffix", "baz")); analyze = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount); tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); assertEquals("qu1ck", tokens.get(1).getTerm()); - assertEquals("foxbar", tokens.get(2).getTerm()); + assertEquals("foxbaz", tokens.get(2).getTerm()); } public void testFillsAttributes() throws IOException {
From 6db8104d225a0e57bbe8be4a7abbedf039290a19 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 27 Jun 2019 10:49:26 +0200 Subject: [PATCH 032/140] Remove Unused AWS KMS Dependency (#43671)
* We don't make use of KMS at the moment, no need to have this dependency here
--- plugins/repository-s3/build.gradle | 1 - .../repository-s3/licenses/aws-java-sdk-kms-1.11.562.jar.sha1 | 1 - 2 files changed, 2 deletions(-) delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-kms-1.11.562.jar.sha1 diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 083acf288a83b..160fd43bd52d0 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -33,7 +33,6 @@ versions << [ dependencies { compile "com.amazonaws:aws-java-sdk-s3:${versions.aws}" - compile "com.amazonaws:aws-java-sdk-kms:${versions.aws}" compile "com.amazonaws:aws-java-sdk-core:${versions.aws}" compile "com.amazonaws:jmespath-java:${versions.aws}" compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" diff --git a/plugins/repository-s3/licenses/aws-java-sdk-kms-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-kms-1.11.562.jar.sha1 deleted file mode 100644 index 65c85dc87b184..0000000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-kms-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1fdf4daf1960fe760e7a950dd28a05c5abc12788 \ No newline at end of file
From a4b97b67b1feace5444496ccc4f70cd81fad92b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 27 Jun 2019 14:33:28 +0200 Subject: [PATCH 033/140] Move query builder caching check to dedicated tests (#43238)
Currently `AbstractQueryTestCase#testToQuery` checks the search context cacheable flag. This is a bit fragile due to the high randomization of query builders performed by this general test. Also we might only rarely check the "interesting" cases because they rarely get generated when fully randomizing the query builder. This change moves the general checks out of #testToQuery and instead adds dedicated cache tests for those query builders that exhibit something other than the default behaviour.
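In outline, each dedicated test follows this shape (a condensed sketch of the ScriptQueryBuilderTests variant from the diff below; createTestQueryBuilder, createShardContext and rewriteQuery are the existing AbstractQueryTestCase helpers):

    // Rewrite the query against a fresh QueryShardContext, build the Lucene
    // query, then assert the context's cacheable flag directly.
    @Override
    public void testCacheability() throws IOException {
        ScriptQueryBuilder queryBuilder = createTestQueryBuilder();
        QueryShardContext context = createShardContext();
        QueryBuilder rewritten = rewriteQuery(queryBuilder, new QueryShardContext(context));
        assertNotNull(rewritten.toQuery(context));
        // script queries are never cacheable, so the flag must have been cleared
        assertFalse("query should not be cacheable: " + queryBuilder, context.isCacheable());
    }

Builders that are only conditionally cacheable (more_like_this, terms_set, range on "now", function_score) additionally construct the cacheable and non-cacheable variants explicitly instead of relying on randomization.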
Closes #43200 --- .../PercolateQueryBuilderTests.java | 12 +++++-- .../ScriptScoreQueryBuilder.java | 8 ++--- .../query/MoreLikeThisQueryBuilderTests.java | 31 +++++++++++++--- .../index/query/RangeQueryBuilderTests.java | 21 +++++++++++ .../index/query/ScriptQueryBuilderTests.java | 11 ++++-- .../query/ScriptScoreQueryBuilderTests.java | 11 ++++-- .../index/query/TermsQueryBuilderTests.java | 7 ---- .../query/TermsSetQueryBuilderTests.java | 36 +++++++++++++++++-- .../index/query/TypeQueryBuilderTests.java | 6 ++++ .../FunctionScoreQueryBuilderTests.java | 36 +++++++++++++++++-- .../test/AbstractQueryTestCase.java | 24 +++++++------ 11 files changed, 167 insertions(+), 36 deletions(-) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 5b4dc61090042..a86f93ce40549 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -296,9 +296,17 @@ private static BytesReference randomSource(Set usedFields) { } } + /** + * Test that this query is never cacheable + */ @Override - protected boolean isCacheable(PercolateQueryBuilder queryBuilder) { - return false; + public void testCacheability() throws IOException { + PercolateQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryShardContext context = createShardContext(); + assert context.isCacheable(); + QueryBuilder rewritten = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewritten.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java index fb53f1c9560cc..b5cb15b3d00aa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreQueryBuilder.java @@ -23,17 +23,17 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.script.Script; -import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction; -import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; import org.elasticsearch.index.query.InnerHitContextBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.script.Script; import java.io.IOException; import java.util.Map; diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 9f82ea8a690e2..98c2a162faf06 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -363,9 +363,34 @@ public void testItemFromXContent() throws IOException { assertEquals(expectedItem, newItem); } + /** + * Check that this query is generally not cacheable, except when we fetch 0 items + */ @Override - protected boolean isCacheable(MoreLikeThisQueryBuilder queryBuilder) { - return queryBuilder.likeItems().length == 0; // items are always fetched + public void testCacheability() throws IOException { + MoreLikeThisQueryBuilder queryBuilder = createTestQueryBuilder(); + boolean isCacheable = queryBuilder.likeItems().length == 0; // items are always fetched + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertEquals("query should " + (isCacheable ? "" : "not") + " be cacheable: " + queryBuilder.toString(), isCacheable, + context.isCacheable()); + + // specifically trigger case where query is cacheable + queryBuilder = new MoreLikeThisQueryBuilder(randomStringFields(), new String[] {"some text"}, null); + context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + + // specifically trigger case where query is not cacheable + queryBuilder = new MoreLikeThisQueryBuilder(randomStringFields(), null, new Item[] { new Item("foo", "1") }); + context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } public void testFromJson() throws IOException { @@ -405,8 +430,6 @@ public void testFromJson() throws IOException { protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder query = super.parseQuery(parser); assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class)); - - MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query; return query; }
diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index d270a8c7113b5..8c129d689a86e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -601,4 +601,25 @@ public void testTypeField() throws IOException { builder.doToQuery(createShardContext()); assertWarnings(QueryShardContext.TYPES_DEPRECATION_MESSAGE); } + + /** + * Range queries should generally be cacheable, at least the ones we create randomly. + * This test makes sure we also test the non-cacheable cases regularly.
+ */ + @Override + public void testCacheability() throws IOException { + RangeQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + + // queries on date fields using "now" should not be cached + queryBuilder = new RangeQueryBuilder(randomFrom(DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME)); + queryBuilder.to("now"); + context = createShardContext(); + rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java index b0bbca3266bab..b3cac936d3ae2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java @@ -116,8 +116,15 @@ protected Set getObjectsHoldingArbitraryContent() { return Collections.singleton(Script.PARAMS_PARSE_FIELD.getPreferredName()); } + /** + * Check that this query is generally not cacheable + */ @Override - protected boolean isCacheable(ScriptQueryBuilder queryBuilder) { - return false; + public void testCacheability() throws IOException { + ScriptQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java index ad9af8c49c391..ad420e8bbdc33 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java @@ -88,8 +88,15 @@ public void testIllegalArguments() { ); } + /** + * Check that this query is generally not cacheable + */ @Override - protected boolean isCacheable(ScriptScoreQueryBuilder queryBuilder) { - return false; + public void testCacheability() throws IOException { + ScriptScoreQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 40e32b91d7e55..f782c45c03127 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -281,13 +281,6 @@ public void testGeo() throws Exception { e.getMessage()); } - @Override - protected boolean 
isCacheable(TermsQueryBuilder queryBuilder) { - // even though we use a terms lookup here we do this during rewrite and that means we are cacheable on toQuery - // that's why we return true here all the time - return super.isCacheable(queryBuilder); - } - public void testSerializationFailsUnlessFetched() throws IOException { QueryBuilder builder = new TermsQueryBuilder(STRING_FIELD_NAME, randomTermsLookup()); QueryBuilder termsQueryBuilder = Rewriteable.rewrite(builder, createShardContext());
diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index f68769bb89cb5..84e90f8901674 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -110,10 +110,42 @@ protected void doAssertLuceneQuery(TermsSetQueryBuilder queryBuilder, Query quer } } + /** + * Check the conditions under which this query is cacheable: explicitly test the two cases where it is and one where it is not + */ @Override - protected boolean isCacheable(TermsSetQueryBuilder queryBuilder) { - return queryBuilder.getMinimumShouldMatchField() != null || + public void testCacheability() throws IOException { + TermsSetQueryBuilder queryBuilder = createTestQueryBuilder(); + boolean isCacheable = queryBuilder.getMinimumShouldMatchField() != null || (queryBuilder.getMinimumShouldMatchScript() != null && queryBuilder.getValues().isEmpty()); + QueryShardContext context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(queryBuilder.doToQuery(context)); + assertEquals("query should " + (isCacheable ? "" : "not") + " be cacheable: " + queryBuilder.toString(), isCacheable, + context.isCacheable()); + + // specifically trigger the two cases where query is cacheable + queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, Collections.singletonList("foo")); + queryBuilder.setMinimumShouldMatchField("m_s_m"); + context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(queryBuilder.doToQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + + queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, Collections.emptyList()); + queryBuilder.setMinimumShouldMatchScript(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_script", emptyMap())); + context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(queryBuilder.doToQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + + // also test one case where query is not cacheable + queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, Collections.singletonList("foo")); + queryBuilder.setMinimumShouldMatchScript(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_script", emptyMap())); + context = createShardContext(); + rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(queryBuilder.doToQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } @Override
diff --git a/server/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java index 67916e52789c5..be2592f01fcf7 100644 ---
a/server/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java @@ -83,4 +83,10 @@ public void testMustRewrite() throws IOException { super.testMustRewrite(); assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); } + + @Override + public void testCacheability() throws IOException { + super.testCacheability(); + assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 8f177cac863b3..c21c9a56a4c3c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query.functionscore; import com.fasterxml.jackson.core.JsonParseException; + import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -40,6 +41,7 @@ import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.RandomQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.WrapperQueryBuilder; @@ -676,7 +678,7 @@ public void testRewriteWithFunction() throws IOException { */ public void testSingleScriptFunction() throws IOException { QueryBuilder queryBuilder = RandomQueryBuilder.createQuery(random()); - ScoreFunctionBuilder functionBuilder = new ScriptScoreFunctionBuilder( + ScoreFunctionBuilder functionBuilder = new ScriptScoreFunctionBuilder( new Script(ScriptType.INLINE, MockScriptEngine.NAME, "1", Collections.emptyMap())); FunctionScoreQueryBuilder builder = functionScoreQuery(queryBuilder, functionBuilder); @@ -796,8 +798,38 @@ public List> getScoreFunctions() { } } + /** + * Check that this query is generally cacheable except for builders using {@link ScriptScoreFunctionBuilder} or + * {@link RandomScoreFunctionBuilder} without a seed + */ @Override - protected boolean isCacheable(FunctionScoreQueryBuilder queryBuilder) { + public void testCacheability() throws IOException { + FunctionScoreQueryBuilder queryBuilder = createTestQueryBuilder(); + boolean isCacheable = isCacheable(queryBuilder); + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertEquals("query should " + (isCacheable ? 
"" : "not") + " be cacheable: " + queryBuilder.toString(), isCacheable, + context.isCacheable()); + + // check the two non-cacheable cases explicitly + ScoreFunctionBuilder scriptScoreFunction = new ScriptScoreFunctionBuilder( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, "1", Collections.emptyMap())); + RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilderWithFixedSeed(); + + for (ScoreFunctionBuilder scoreFunction : List.of(scriptScoreFunction, randomScoreFunctionBuilder)) { + FilterFunctionBuilder[] functions = new FilterFunctionBuilder[] { + new FilterFunctionBuilder(RandomQueryBuilder.createQuery(random()), scoreFunction) }; + queryBuilder = new FunctionScoreQueryBuilder(functions); + + context = createShardContext(); + rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); + } + } + + private boolean isCacheable(FunctionScoreQueryBuilder queryBuilder) { FilterFunctionBuilder[] filterFunctionBuilders = queryBuilder.filterFunctionBuilders(); for (FilterFunctionBuilder builder : filterFunctionBuilders) { if (builder.getScoreFunction() instanceof ScriptScoreFunctionBuilder) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 0ce38f3d68144..29c3fc5a27b6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -428,13 +428,6 @@ public void testToQuery() throws IOException { * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ QueryBuilder rewritten = rewriteQuery(firstQuery, new QueryShardContext(context)); Query firstLuceneQuery = rewritten.toQuery(context); - if (isCacheable(firstQuery)) { - assertTrue("query was marked as not cacheable in the context but this test indicates it should be cacheable: " - + firstQuery.toString(), context.isCacheable()); - } else { - assertFalse("query was marked as cacheable in the context but this test indicates it should not be cacheable: " - + firstQuery.toString(), context.isCacheable()); - } assertNotNull("toQuery should not return null", firstLuceneQuery); assertLuceneQuery(firstQuery, firstLuceneQuery, searchContext); //remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well @@ -478,10 +471,6 @@ protected QueryBuilder rewriteQuery(QB queryBuilder, QueryRewriteContext rewrite return rewritten; } - protected boolean isCacheable(QB queryBuilder) { - return true; - } - /** * Few queries allow you to set the boost on the Java API, although the corresponding parser * doesn't parse it as it isn't supported. This method allows to disable boost related tests for those queries. @@ -806,4 +795,17 @@ protected QueryBuilder rewriteAndFetch(QueryBuilder builder, QueryRewriteContext public boolean isTextField(String fieldName) { return fieldName.equals(STRING_FIELD_NAME) || fieldName.equals(STRING_ALIAS_FIELD_NAME); } + + /** + * Check that a query is generally cacheable. 
Tests for query builders that are not always cacheable + * should override this method and make sure the different cases are always tested + */ + public void testCacheability() throws IOException { + QB queryBuilder = createTestQueryBuilder(); + QueryShardContext context = createShardContext(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); + assertNotNull(rewriteQuery.toQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + } + }
From ed19edb46fe3fbb45fbfb4f3b47087de186ac9fe Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 27 Jun 2019 14:32:47 +0100 Subject: [PATCH 034/140] [ML][DataFrame] Consider data frame templates internal in REST tests (#43692)
The data frame index template pattern was not in the list of templates considered internal, so the data frame templates were not cleaned up after every test.
--- .../main/java/org/elasticsearch/test/rest/ESRestTestCase.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 90fc04400c476..969cebc0c60b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -944,6 +944,9 @@ private static boolean isXPackTemplate(String name) { if (name.startsWith(".watch") || name.startsWith(".triggered_watches")) { return true; } + if (name.startsWith(".data-frame-")) { + return true; + } if (name.startsWith(".ml-")) { return true; }
From 392245b45f41180bd56bb12bce2f1cf554bcd1fe Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 27 Jun 2019 14:42:27 +0100 Subject: [PATCH 035/140] Remove preconfigured `delimited_payload_filter` (#43686)
#41560 removed the delimited_payload_filter as part of a general cleanup of pre-version 7 restrictions, but missed removing the preconfigured version due to #43684.
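For context, pre-configured filters are the ones that can be referenced by bare name without any index, for example from an analyze request. A hypothetical request that still used the removed name would no longer resolve (an illustrative sketch, not taken from this patch; the sample text and error behaviour are assumptions):

    AnalyzeAction.Request request = new AnalyzeAction.Request();
    request.text("the|1 quick|2 fox|3");
    request.tokenizer("whitespace");
    request.addTokenFilter("delimited_payload");            // still registered below
    // request.addTokenFilter("delimited_payload_filter");  // would now fail to resolve

Only the legacy pre-7.0 alias is dropped; the `delimited_payload` registration kept as context in the hunk below is unchanged.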
--- .../elasticsearch/analysis/common/CommonAnalysisPlugin.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 5ecd491fef34a..ee6ff73ea4351 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -395,10 +395,6 @@ public List getPreConfiguredTokenFilters() { input -> new CommonGramsFilter(input, CharArraySet.EMPTY_SET))); filters.add(PreConfiguredTokenFilter.singleton("czech_stem", false, CzechStemFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("decimal_digit", true, DecimalDigitFilter::new)); - filters.add(PreConfiguredTokenFilter.singleton("delimited_payload_filter", false, input -> - new DelimitedPayloadTokenFilter(input, - DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER, - DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER))); filters.add(PreConfiguredTokenFilter.singleton("delimited_payload", false, input -> new DelimitedPayloadTokenFilter(input, DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER, From 46c7cd905f74a006caa9cd2fa27a44e8dd63e605 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Thu, 27 Jun 2019 15:52:08 +0200 Subject: [PATCH 036/140] Deduplicate two similar TimeUtils classes. * Deduplicate org.elasticsearch.xpack.core.dataframe.utils.TimeUtils and org.elasticsearch.xpack.core.ml.utils.time.TimeUtils into a common class: org.elasticsearch.xpack.core.common.time.TimeUtils. * Add unit tests for parseTimeField and parseTimeFieldToInstant methods --- .../utils => common/time}/TimeUtils.java | 8 +- .../notifications/DataFrameAuditMessage.java | 2 +- .../transforms/DataFrameTransformConfig.java | 2 +- .../xpack/core/ml/annotations/Annotation.java | 2 +- .../core/ml/calendars/ScheduledEvent.java | 2 +- .../core/ml/datafeed/DatafeedConfig.java | 2 +- .../ml/datafeed/DelayedDataCheckConfig.java | 2 +- .../core/ml/job/config/AnalysisConfig.java | 2 +- .../xpack/core/ml/job/config/Job.java | 2 +- .../output/FlushAcknowledgement.java | 2 +- .../process/autodetect/state/DataCounts.java | 2 +- .../autodetect/state/ModelSizeStats.java | 2 +- .../autodetect/state/ModelSnapshot.java | 2 +- .../core/ml/job/results/AnomalyRecord.java | 2 +- .../xpack/core/ml/job/results/Bucket.java | 2 +- .../core/ml/job/results/BucketInfluencer.java | 2 +- .../xpack/core/ml/job/results/Forecast.java | 2 +- .../xpack/core/ml/job/results/Influencer.java | 2 +- .../xpack/core/ml/job/results/ModelPlot.java | 2 +- .../core/ml/notifications/AuditMessage.java | 2 +- .../xpack/core/ml/utils/time/TimeUtils.java | 129 ------------------ .../AbstractAuditMessageTests.java | 2 +- .../core/common}/time/TimeUtilsTests.java | 49 ++++++- .../autodetect/params/FlushJobParams.java | 2 +- .../process/autodetect/params/TimeRange.java | 2 +- 25 files changed, 71 insertions(+), 159 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/{dataframe/utils => common/time}/TimeUtils.java (96%) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java rename x-pack/plugin/{ml/src/test/java/org/elasticsearch/xpack/ml/utils => core/src/test/java/org/elasticsearch/xpack/core/common}/time/TimeUtilsTests.java (64%) diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/utils/TimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/TimeUtils.java similarity index 96% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/utils/TimeUtils.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/TimeUtils.java index 21a4692f547af..e345feb59b04e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/utils/TimeUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/TimeUtils.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.dataframe.utils; +package org.elasticsearch.xpack.core.common.time; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; @@ -26,7 +26,7 @@ public static Date parseTimeField(XContentParser parser, String fieldName) throw if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { return new Date(parser.longValue()); } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(parser.text())); + return new Date(dateStringToEpoch(parser.text())); } throw new IllegalArgumentException( "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); @@ -36,7 +36,7 @@ public static Instant parseTimeFieldToInstant(XContentParser parser, String fiel if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { return Instant.ofEpochMilli(parser.longValue()); } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return Instant.ofEpochMilli(TimeUtils.dateStringToEpoch(parser.text())); + return Instant.ofEpochMilli(dateStringToEpoch(parser.text())); } throw new IllegalArgumentException( "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); @@ -123,8 +123,6 @@ private static void checkNonNegative(TimeValue timeValue, ParseField field) { } } - - /** * Check the given {@code timeValue} is a multiple of the {@code baseUnit} */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java index dd6aee25580df..e0ebd8e97d9a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage; import org.elasticsearch.xpack.core.common.notifications.Level; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.utils.TimeUtils; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java index 2762e0507ef06..e3ad50d9b889e 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java @@ -20,11 +20,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.dataframe.transforms.pivot.PivotConfig; import org.elasticsearch.xpack.core.dataframe.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.dataframe.utils.TimeUtils; import java.io.IOException; import java.time.Instant; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java index 185808de00463..91c4053ed1568 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/Annotation.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java index afd10e0c17b01..03bb25d3652e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.Intervals; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.time.Instant; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 54d4869bf2824..9285256c76819 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -33,7 +33,7 @@ import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfig.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfig.java index 9406b91d119c7..52de35af1fac3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfig.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Objects; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 9e01cd21e2b90..07b823f27768c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index 1cb44f9625cb5..5a2b2314e8d53 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlStrings; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index ff47cfe1ca85c..932e51a60f151 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java index 8d542ce25af93..13e6459ca37eb 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java index f02120433efc4..2e78ab39fe6e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.results.Result; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 02bef36c00ab0..268da28b1d07e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.io.InputStream; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java index 5026c387e0870..ab32373644d19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java index e34bc648cadae..f4134033ff8dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java index e6031b3b8df52..8cb06b6e0a22d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java index 03a9b801167e7..5f4e3c829c3d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Forecast.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java index d226058bf1db4..d17b375459bb4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java index 60c8c30b94230..ab7235ca27ac4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; -import 
org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.io.IOException; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java index c3328bb3263fd..6daa4223afd73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/AuditMessage.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import java.util.Date; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java deleted file mode 100644 index ea0994dad717c..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.ml.utils.time; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DateFieldMapper; - -import java.io.IOException; -import java.util.Date; -import java.util.concurrent.TimeUnit; - -public final class TimeUtils { - - private TimeUtils() { - // Do nothing - } - - public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException { - if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new Date(parser.longValue()); - } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(parser.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); - } - - /** - * First tries to parse the date first as a Long and convert that to an - * epoch time. If the long number has more than 10 digits it is considered a - * time in milliseconds else if 10 or less digits it is in seconds. If that - * fails it tries to parse the string using - * {@link DateFieldMapper#DEFAULT_DATE_TIME_FORMATTER} - * - * If the date string cannot be parsed -1 is returned. - * - * @return The epoch time in milliseconds or -1 if the date cannot be - * parsed. 
- */ - public static long dateStringToEpoch(String date) { - try { - long epoch = Long.parseLong(date); - if (date.trim().length() <= 10) { // seconds - return epoch * 1000; - } else { - return epoch; - } - } catch (NumberFormatException nfe) { - // not a number - } - - try { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); - } catch (ElasticsearchParseException | IllegalArgumentException e) { - } - // Could not do the conversion - return -1; - }
- - /** - * Checks that the given {@code timeValue} is a non-negative multiple value of the {@code baseUnit}. - * - * <ul> - * <li>400ms is valid for base unit of seconds</li> - * <li>450ms is invalid for base unit of seconds but valid for base unit of milliseconds</li> - * </ul> - */ - public static void checkNonNegativeMultiple(TimeValue timeValue, TimeUnit baseUnit, ParseField field) { - checkNonNegative(timeValue, field); - checkMultiple(timeValue, baseUnit, field); - }
- - /** - * Checks that the given {@code timeValue} is a positive multiple value of the {@code baseUnit}. - * - * <ul> - * <li>400ms is valid for base unit of seconds</li> - * <li>450ms is invalid for base unit of seconds but valid for base unit of milliseconds</li> - * </ul> - */ - public static void checkPositiveMultiple(TimeValue timeValue, TimeUnit baseUnit, ParseField field) { - checkPositive(timeValue, field); - checkMultiple(timeValue, baseUnit, field); - }
- - /** - * Checks that the given {@code timeValue} is positive. - * - * <ul> - * <li>1s is valid</li> - * <li>-1s is invalid</li> - * </ul> - */ - public static void checkPositive(TimeValue timeValue, ParseField field) { - long nanos = timeValue.getNanos(); - if (nanos <= 0) { - throw new IllegalArgumentException(field.getPreferredName() + " cannot be less or equal than 0. Value = " - + timeValue.toString()); - } - }
- - private static void checkNonNegative(TimeValue timeValue, ParseField field) { - long nanos = timeValue.getNanos(); - if (nanos < 0) { - throw new IllegalArgumentException(field.getPreferredName() + " cannot be less than 0. Value = " + timeValue.toString()); - } - }
- - - - /** - * Check the given {@code timeValue} is a multiple of the {@code baseUnit} - */ - public static void checkMultiple(TimeValue timeValue, TimeUnit baseUnit, ParseField field) { - long nanos = timeValue.getNanos(); - TimeValue base = new TimeValue(1, baseUnit); - long baseNanos = base.getNanos(); - if (nanos % baseNanos != 0) { - throw new IllegalArgumentException(field.getPreferredName() + " has to be a multiple of " + base.toString() + "; actual was '" - + timeValue.toString() + "'"); - } - } -}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditMessageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditMessageTests.java index 8fb425698376e..e87e2cb0d93dc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditMessageTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditMessageTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.junit.Before; import java.util.Date;
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/time/TimeUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/TimeUtilsTests.java similarity index 64% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/time/TimeUtilsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/TimeUtilsTests.java index d33968a37cfa7..e122202b5fa6c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/time/TimeUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/TimeUtilsTests.java @@ -3,18 +3,61 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License.
*/ -package org.elasticsearch.xpack.ml.utils.time; +package org.elasticsearch.xpack.core.common.time; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; +import java.io.IOException; +import java.time.Instant; +import java.util.Date; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + public class TimeUtilsTests extends ESTestCase { - public void testdateStringToEpoch() { + public void testParseTimeField() throws IOException { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "123456789")) { + parser.nextToken(); + Date date = TimeUtils.parseTimeField(parser, "my_time_field"); + assertThat(date.getTime(), equalTo(123456789L)); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2016-05-01T10:00:00.333-0030\"")) { + parser.nextToken(); + Date date = TimeUtils.parseTimeField(parser, "my_time_field"); + assertThat(date.getTime(), equalTo(1462098600333L)); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{}")) { + parser.nextToken(); + Exception e = expectThrows(IllegalArgumentException.class, () -> TimeUtils.parseTimeField(parser, "my_time_field")); + assertThat(e.getMessage(), containsString("unexpected token [START_OBJECT] for [my_time_field]")); + } + } + + public void testParseTimeFieldToInstant() throws IOException { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "123456789")) { + parser.nextToken(); + Instant instant = TimeUtils.parseTimeFieldToInstant(parser, "my_time_field"); + assertThat(instant.toEpochMilli(), equalTo(123456789L)); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "\"2016-05-01T10:00:00.333-0030\"")) { + parser.nextToken(); + Instant instant = TimeUtils.parseTimeFieldToInstant(parser, "my_time_field"); + assertThat(instant.toEpochMilli(), equalTo(1462098600333L)); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{}")) { + parser.nextToken(); + Exception e = expectThrows(IllegalArgumentException.class, () -> TimeUtils.parseTimeFieldToInstant(parser, "my_time_field")); + assertThat(e.getMessage(), containsString("unexpected token [START_OBJECT] for [my_time_field]")); + } + } + + public void testDateStringToEpoch() { assertEquals(1462096800000L, TimeUtils.dateStringToEpoch("2016-05-01T10:00:00Z")); assertEquals(1462096800333L, TimeUtils.dateStringToEpoch("2016-05-01T10:00:00.333Z")); assertEquals(1462096800334L, TimeUtils.dateStringToEpoch("2016-05-01T10:00:00.334+00")); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/FlushJobParams.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/FlushJobParams.java index fd813e27fda93..354b2d4c1b669 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/FlushJobParams.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/FlushJobParams.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import 
org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;
+import org.elasticsearch.xpack.core.common.time.TimeUtils;
 
 import java.util.Objects;
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/TimeRange.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/TimeRange.java
index a14d810d0d2f9..d99a8351c7bef 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/TimeRange.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/TimeRange.java
@@ -9,7 +9,7 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.xpack.core.ml.job.messages.Messages;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
-import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;
+import org.elasticsearch.xpack.core.common.time.TimeUtils;
 
 import java.util.Objects;
 
From 0a0eb974751bccdbc7e18ca81b9f45f3fc7b87f4 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Thu, 27 Jun 2019 16:56:15 +0200
Subject: [PATCH 037/140] Fix UOE on search requests that match a sparse role
 query (#43668)

Search requests executed through the SecurityIndexSearcherWrapper throw
an UnsupportedOperationException if they match a sparse role query.
When low-level cancellation is activated (which is the default since
#42857), the context index searcher creates a weight that doesn't
implement #scorer. This change fixes the bug and adds a test that
covers this case.
---
 .../search/internal/ContextIndexSearcher.java   | 7 ++++---
 .../SecurityIndexSearcherWrapperUnitTests.java  | 8 +++++++-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
index 7c56796f3d24d..49c310ba706b6 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
@@ -152,13 +152,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio
     }
 
     @Override
-    public Scorer scorer(LeafReaderContext context) throws IOException {
+    public boolean isCacheable(LeafReaderContext ctx) {
         throw new UnsupportedOperationException();
     }
 
     @Override
-    public boolean isCacheable(LeafReaderContext ctx) {
-        throw new UnsupportedOperationException();
+    public Scorer scorer(LeafReaderContext context) throws IOException {
+        // in case the wrapped searcher (in) uses the scorer directly
+        return weight.scorer(context);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
index b9eb0241d9a3e..3da3949bad967 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
@@ -43,6 +43,7 @@
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.engine.Engine;
+import
org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SeqNoFieldMapper; @@ -52,6 +53,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader; @@ -537,7 +539,11 @@ public void onRemoval(ShardId shardId, Accountable accountable) { } DocumentSubsetDirectoryReader filteredReader = DocumentSubsetReader.wrap(reader, cache, roleQuery); - IndexSearcher searcher = new SecurityIndexSearcherWrapper.IndexSearcherWrapper(filteredReader); + IndexSearcher wrapSearcher = new SecurityIndexSearcherWrapper.IndexSearcherWrapper(filteredReader); + Engine.Searcher engineSearcher = new Engine.Searcher("test", wrapSearcher, () -> {}); + ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, + wrapSearcher.getQueryCache(), wrapSearcher.getQueryCachingPolicy()); + searcher.setCheckCancelled(() -> {}); // Searching a non-existing term will trigger a null scorer assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value")))); From b77a6fc6835ce7a519db8dc49f0cee888c3b2db6 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 27 Jun 2019 17:00:21 +0200 Subject: [PATCH 038/140] Fix propagation of enablePositionIncrements in QueryStringQueryBuilder (#43578) This change fixes the propagation of the enablePositionIncrements option to the underlying QueryBuilder. Closes #43574 --- .../index/search/MatchQuery.java | 7 ++-- .../index/search/MultiMatchQuery.java | 11 ++++--- .../index/search/QueryStringQueryParser.java | 6 ++++ .../query/QueryStringQueryBuilderTests.java | 33 ++++++++----------- 4 files changed, 30 insertions(+), 27 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java index d2e99aa8eb780..8a43dfbdb3f96 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -238,7 +238,7 @@ public Query parse(Type type, String fieldName, Object value) throws IOException Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE || type == Type.PHRASE_PREFIX); assert analyzer != null; - MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); + MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); /* * If a keyword analyzer is used, we know that further analysis isn't @@ -316,15 +316,16 @@ class MatchQueryBuilder extends QueryBuilder { /** * Creates a new QueryBuilder using the given analyzer. 
*/ - MatchQueryBuilder(Analyzer analyzer, MappedFieldType fieldType) { + MatchQueryBuilder(Analyzer analyzer, MappedFieldType fieldType, + boolean enablePositionIncrements, boolean autoGenerateSynonymsPhraseQuery) { super(analyzer); this.fieldType = fieldType; + setEnablePositionIncrements(enablePositionIncrements); if (hasPositions(fieldType)) { setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); } else { setAutoGenerateMultiTermSynonymsPhraseQuery(false); } - setEnablePositionIncrements(enablePositionIncrements); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index a753307af29be..cbc06a6ff081d 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -138,9 +138,11 @@ private List buildCrossFieldQuery(MultiMatchQueryBuilder.Type type, Map> group : groups.entrySet()) { final MatchQueryBuilder builder; if (group.getValue().size() == 1) { - builder = new MatchQueryBuilder(group.getKey(), group.getValue().get(0).fieldType); + builder = new MatchQueryBuilder(group.getKey(), group.getValue().get(0).fieldType, + enablePositionIncrements, autoGenerateSynonymsPhraseQuery); } else { - builder = new BlendedQueryBuilder(group.getKey(), group.getValue(), tieBreaker); + builder = new BlendedQueryBuilder(group.getKey(), group.getValue(), tieBreaker, + enablePositionIncrements, autoGenerateSynonymsPhraseQuery); } /* @@ -170,8 +172,9 @@ private class BlendedQueryBuilder extends MatchQueryBuilder { private final List blendedFields; private final float tieBreaker; - BlendedQueryBuilder(Analyzer analyzer, List blendedFields, float tieBreaker) { - super(analyzer, blendedFields.get(0).fieldType); + BlendedQueryBuilder(Analyzer analyzer, List blendedFields, float tieBreaker, + boolean enablePositionIncrements, boolean autoGenerateSynonymsPhraseQuery) { + super(analyzer, blendedFields.get(0).fieldType, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); this.blendedFields = blendedFields; this.tieBreaker = tieBreaker; } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 9508e439d597c..22be2131e3347 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -153,6 +153,12 @@ private QueryStringQueryParser(QueryShardContext context, String defaultField, this.lenient = lenient; } + @Override + public void setEnablePositionIncrements(boolean enable) { + super.setEnablePositionIncrements(enable); + queryBuilder.setEnablePositionIncrements(enable); + } + @Override public void setDefaultOperator(Operator op) { super.setDefaultOperator(op); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index ddac2be97b1f0..3e9da2f2e5099 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -765,26 +765,6 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exceptio assertThat(e.getMessage(), containsString("would result in more 
than 10 states")); } - /** - * Validates that {@code max_determinized_states} can be parsed and lowers the allowed number of determinized states. - */ - public void testEnabledPositionIncrements() throws Exception { - - XContentBuilder builder = JsonXContent.contentBuilder(); - builder.startObject(); { - builder.startObject("query_string"); { - builder.field("query", "text"); - builder.field("default_field", STRING_FIELD_NAME); - builder.field("enable_position_increments", false); - } - builder.endObject(); - } - builder.endObject(); - - QueryStringQueryBuilder queryBuilder = (QueryStringQueryBuilder) parseInnerQueryBuilder(createParser(builder)); - assertFalse(queryBuilder.enablePositionIncrements()); - } - public void testToQueryFuzzyQueryAutoFuziness() throws Exception { for (int i = 0; i < 3; i++) { final int len; @@ -1438,6 +1418,19 @@ public void testWithStopWords() throws Exception { assertEquals(expected, query); } + public void testEnablePositionIncrement() throws Exception { + Query query = new QueryStringQueryBuilder("\"quick the fox\"") + .field(STRING_FIELD_NAME) + .analyzer("stop") + .enablePositionIncrements(false) + .toQuery(createShardContext()); + PhraseQuery expected = new PhraseQuery.Builder() + .add(new Term(STRING_FIELD_NAME, "quick")) + .add(new Term(STRING_FIELD_NAME, "fox")) + .build(); + assertEquals(expected, query); + } + public void testWithPrefixStopWords() throws Exception { Query query = new QueryStringQueryBuilder("the* quick fox") .field(STRING_FIELD_NAME) From a6144158384876d259e59986af7c2fd14b67f6cc Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 27 Jun 2019 17:30:02 +0200 Subject: [PATCH 039/140] Remove deprecated sort options: nested_path and nested_filter (#42809) This commit removes the nested_path and nested_filter options deprecated in 6x. This change also checks that the sort field has a [nested] option if it is under a nested object and throws an exception if it's not the case. Closes #27098 --- .../migration/migrate_8_0/search.asciidoc | 5 + docs/reference/search/request/sort.asciidoc | 16 +- .../200_top_hits_metric.yml | 9 +- .../index/search/NestedHelper.java | 2 +- .../search/sort/FieldSortBuilder.java | 231 +++++++----------- .../search/sort/GeoDistanceSortBuilder.java | 129 ++-------- .../search/sort/ScriptSortBuilder.java | 118 ++------- .../search/sort/SortBuilder.java | 13 +- .../search/nested/SimpleNestedIT.java | 64 +++-- .../search/scroll/DuelScrollIT.java | 9 +- .../search/sort/FieldSortBuilderTests.java | 73 +----- .../search/sort/FieldSortIT.java | 15 +- .../search/sort/GeoDistanceIT.java | 26 +- .../sort/GeoDistanceSortBuilderTests.java | 90 ++----- .../search/sort/ScriptSortBuilderTests.java | 36 +-- .../search/sort/SortBuilderTests.java | 10 - 16 files changed, 257 insertions(+), 589 deletions(-) diff --git a/docs/reference/migration/migrate_8_0/search.asciidoc b/docs/reference/migration/migrate_8_0/search.asciidoc index 6fba2970f593e..97796a10fca22 100644 --- a/docs/reference/migration/migrate_8_0/search.asciidoc +++ b/docs/reference/migration/migrate_8_0/search.asciidoc @@ -20,3 +20,8 @@ The same functionality can be achieved by the `match` query if the total number The `cutoff_frequency` parameter was deprecated in 7.x and has been removed in 8.0 from `match` and `multi_match` queries. The same functionality can be achieved without any configuration provided that the total number of hits is not tracked. 
+ +[float] +===== Removal of sort parameters + +The `nested_filter` and `nested_path` options, deprecated in 6.x, have been removed in favor of the `nested` context. diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index c12ec3a679a72..ccbc3da6e063b 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -252,7 +252,7 @@ field support has a `nested` sort option with the following properties: A filter that the inner objects inside the nested path should match with in order for its field values to be taken into account by sorting. Common case is to repeat the query / filter inside the - nested filter or query. By default no `nested_filter` is active. + nested filter or query. By default no `filter` is active. `max_children`:: The maximum number of children to consider per root document when picking the sort value. Defaults to unlimited. @@ -260,14 +260,8 @@ field support has a `nested` sort option with the following properties: Same as top-level `nested` but applies to another nested path within the current nested object. -[WARNING] -.Nested sort options before Elasticsearch 6.1 -============================================ - -The `nested_path` and `nested_filter` options have been deprecated in -favor of the options documented above. - -============================================ +NOTE: Elasticsearch will throw an error if a nested field is defined in a sort without +a `nested` context. ===== Nested sorting examples @@ -300,7 +294,7 @@ POST /_search // CONSOLE In the below example `parent` and `child` fields are of type `nested`. -The `nested_path` needs to be specified at each level; otherwise, Elasticsearch doesn't know on what nested level sort values need to be captured. +The `nested.path` needs to be specified at each level; otherwise, Elasticsearch doesn't know on what nested level sort values need to be captured. [source,js] -------------------------------------------------- @@ -374,7 +368,7 @@ GET /_search // CONSOLE NOTE: If a nested inner object doesn't match with -the `nested_filter` then a missing value is used. +the `nested.filter` then a missing value is used. 
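
For readers tracking the Java API side of this change, the removed
`setNestedPath`/`setNestedFilter` setters map one-to-one onto the `nested`
context via `setNestedSort`. A minimal sketch, borrowing the field and filter
names from the `SimpleNestedIT` test updated later in this patch (the wrapper
class is hypothetical, added here only for illustration):

[source,java]
----
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

public class NestedSortMigrationSketch {
    static FieldSortBuilder childValuesSort() {
        // The nested path and filter now live on the NestedSortBuilder,
        // replacing the removed nested_path/nested_filter options.
        return SortBuilders.fieldSort("parent.child.child_values")
            .setNestedSort(new NestedSortBuilder("parent.child")
                .setFilter(QueryBuilders.termQuery("parent.child.filter", true)))
            .order(SortOrder.ASC);
    }
}
----

Deeper nested levels are expressed by chaining another `NestedSortBuilder` on
the outer one, rather than by repeating `nested_path` at each level.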
==== Ignoring Unmapped Fields diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml index cde56fa41e3d9..76274e9034d62 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml @@ -92,16 +92,19 @@ setup: aggs: users: top_hits: - sort: "users.last.keyword" + sort: + users.last.keyword: + nested: + path: users seq_no_primary_term: true - match: { hits.total: 2 } - length: { aggregations.groups.buckets.0.users.hits.hits: 2 } - - match: { aggregations.groups.buckets.0.users.hits.hits.0._id: "1" } + - match: { aggregations.groups.buckets.0.users.hits.hits.0._id: "2" } - match: { aggregations.groups.buckets.0.users.hits.hits.0._index: my-index } - gte: { aggregations.groups.buckets.0.users.hits.hits.0._seq_no: 0 } - gte: { aggregations.groups.buckets.0.users.hits.hits.0._primary_term: 1 } - - match: { aggregations.groups.buckets.0.users.hits.hits.1._id: "2" } + - match: { aggregations.groups.buckets.0.users.hits.hits.1._id: "1" } - match: { aggregations.groups.buckets.0.users.hits.hits.1._index: my-index } - gte: { aggregations.groups.buckets.0.users.hits.hits.1._seq_no: 0 } - gte: { aggregations.groups.buckets.0.users.hits.hits.1._primary_term: 1 } diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 1c17fa0cb935f..ab2ae044bdbaf 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -192,7 +192,7 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { return true; // the field is not a sub field of the nested path } - private static String parentObject(String field) { + public static String parentObject(String field) { int lastDot = field.lastIndexOf('.'); if (lastDot == -1) { return null; diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 6598d32bc2ca8..949a5a3ff441c 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -19,13 +19,11 @@ package org.elasticsearch.search.sort; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.SortField; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; @@ -47,13 +46,13 @@ import 
java.util.Locale; import java.util.Objects; +import static org.elasticsearch.index.search.NestedHelper.parentObject; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** * A sort builder to sort based on a document field. */ public class FieldSortBuilder extends SortBuilder { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(FieldSortBuilder.class)); public static final String NAME = "field_sort"; public static final ParseField MISSING = new ParseField("missing"); @@ -80,10 +79,6 @@ public class FieldSortBuilder extends SortBuilder { private SortMode sortMode; - private QueryBuilder nestedFilter; - - private String nestedPath; - private NestedSortBuilder nestedSort; /** Copy constructor. */ @@ -95,8 +90,6 @@ public FieldSortBuilder(FieldSortBuilder template) { if (template.sortMode != null) { this.sortMode(template.sortMode()); } - this.setNestedFilter(template.getNestedFilter()); - this.setNestedPath(template.getNestedPath()); if (template.getNestedSort() != null) { this.setNestedSort(template.getNestedSort()); } @@ -121,8 +114,12 @@ public FieldSortBuilder(String fieldName) { */ public FieldSortBuilder(StreamInput in) throws IOException { fieldName = in.readString(); - nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); - nestedPath = in.readOptionalString(); + if (in.getVersion().before(Version.V_8_0_0)) { + if (in.readOptionalNamedWriteable(QueryBuilder.class) != null || in.readOptionalString() != null) { + throw new IOException("the [sort] options [nested_path] and [nested_filter] are removed in 8.x, " + + "please use [nested] instead"); + } + } missing = in.readGenericValue(); order = in.readOptionalWriteable(SortOrder::readFromStream); sortMode = in.readOptionalWriteable(SortMode::readFromStream); @@ -136,8 +133,10 @@ public FieldSortBuilder(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(fieldName); - out.writeOptionalNamedWriteable(nestedFilter); - out.writeOptionalString(nestedPath); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeOptionalNamedWriteable(null); + out.writeOptionalString(null); + } out.writeGenericValue(missing); out.writeOptionalWriteable(order); out.writeOptionalWriteable(sortMode); @@ -210,58 +209,6 @@ public SortMode sortMode() { return this.sortMode; } - /** - * Sets the nested filter that the nested objects should match with in order - * to be taken into account for sorting. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedFilter = nestedFilter; - return this; - } - - /** - * Returns the nested filter that the nested objects should match with in - * order to be taken into account for sorting. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public QueryBuilder getNestedFilter() { - return this.nestedFilter; - } - - /** - * Sets the nested path if sorting occurs on a field that is inside a nested - * object. By default when sorting on a field inside a nested object, the - * nearest upper nested object is selected as nested path. 
- * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public FieldSortBuilder setNestedPath(String nestedPath) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedPath = nestedPath; - return this; - } - - /** - * Returns the nested path if sorting occurs in a field that is inside a - * nested object. - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public String getNestedPath() { - return this.nestedPath; - } - /** * Returns the {@link NestedSortBuilder} */ @@ -276,9 +223,6 @@ public NestedSortBuilder getNestedSort() { * order to be taken into account for sorting. */ public FieldSortBuilder setNestedSort(final NestedSortBuilder nestedSort) { - if (this.nestedFilter != null || this.nestedPath != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } this.nestedSort = nestedSort; return this; } @@ -330,12 +274,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (sortMode != null) { builder.field(SORT_MODE.getPreferredName(), sortMode); } - if (nestedFilter != null) { - builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params); - } - if (nestedPath != null) { - builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath); - } if (nestedSort != null) { builder.field(NESTED_FIELD.getPreferredName(), nestedSort); } @@ -367,65 +305,85 @@ private static NumericType resolveNumericType(String value) { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { if (DOC_FIELD_NAME.equals(fieldName)) { - if (order == SortOrder.DESC) { - return SORT_DOC_REVERSE; + return order == SortOrder.DESC ? SORT_DOC_REVERSE : SORT_DOC; + } + + boolean isUnmapped = false; + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + isUnmapped = true; + if (unmappedType != null) { + fieldType = context.getMapperService().unmappedFieldType(unmappedType); } else { - return SORT_DOC; - } - } else { - boolean isUnmapped = false; - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - isUnmapped = true; - if (unmappedType != null) { - fieldType = context.getMapperService().unmappedFieldType(unmappedType); - } else { - throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on"); - } + throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on"); } + } - MultiValueMode localSortMode = null; - if (sortMode != null) { - localSortMode = MultiValueMode.fromString(sortMode.toString()); - } + MultiValueMode localSortMode = null; + if (sortMode != null) { + localSortMode = MultiValueMode.fromString(sortMode.toString()); + } - boolean reverse = (order == SortOrder.DESC); - if (localSortMode == null) { - localSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN; - } + boolean reverse = (order == SortOrder.DESC); + if (localSortMode == null) { + localSortMode = reverse ? 
MultiValueMode.MAX : MultiValueMode.MIN; + } - Nested nested = null; - if (isUnmapped == false) { - if (nestedSort != null) { - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } - // new nested sorts takes priority - nested = resolveNested(context, nestedSort); - } else { - nested = resolveNested(context, nestedPath, nestedFilter); + Nested nested = null; + if (isUnmapped == false) { + if (nestedSort != null) { + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); } + nested = resolveNested(context, nestedSort); + } else { + validateMissingNestedPath(context, fieldName); } + } - IndexFieldData fieldData = context.getForField(fieldType); - if (fieldData instanceof IndexNumericFieldData == false - && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { - throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); + IndexFieldData fieldData = context.getForField(fieldType); + if (fieldData instanceof IndexNumericFieldData == false + && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { + throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); + } + final SortField field; + if (numericType != null) { + if (fieldData instanceof IndexNumericFieldData == false) { + throw new QueryShardException(context, + "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName()); } - final SortField field; - if (numericType != null) { - if (fieldData instanceof IndexNumericFieldData == false) { + SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData; + NumericType resolvedType = resolveNumericType(numericType); + field = numericFieldData.sortField(resolvedType, missing, localSortMode, nested, reverse); + } else { + field = fieldData.sortField(missing, localSortMode, nested, reverse); + } + return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); + } + + /** + * Throws an exception if the provided field requires a nested context. 
+ */ + static void validateMissingNestedPath(QueryShardContext context, String field) { + ObjectMapper contextMapper = context.nestedScope().getObjectMapper(); + if (contextMapper != null && contextMapper.nested().isNested() == false) { + // already in nested context + return; + } + for (String parent = parentObject(field); parent != null; parent = parentObject(parent)) { + ObjectMapper parentMapper = context.getObjectMapper(parent); + if (parentMapper != null && parentMapper.nested().isNested()) { + if (contextMapper != null && contextMapper.fullPath().equals(parentMapper.fullPath())) { + // we are in a nested context that matches the path of the provided field so the nested path + // is not required + return ; + } + if (parentMapper.nested().isIncludeInRoot() == false) { throw new QueryShardException(context, - "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName()); + "it is mandatory to set the [nested] context on the nested sort field: [" + field + "]."); } - SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData; - NumericType resolvedType = resolveNumericType(numericType); - field = numericFieldData.sortField(resolvedType, missing, localSortMode, nested, reverse); - } else { - field = fieldData.sortField(missing, localSortMode, nested, reverse); } - return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); } } @@ -440,8 +398,7 @@ public boolean equals(Object other) { } FieldSortBuilder builder = (FieldSortBuilder) other; - return (Objects.equals(this.fieldName, builder.fieldName) && Objects.equals(this.nestedFilter, builder.nestedFilter) - && Objects.equals(this.nestedPath, builder.nestedPath) && Objects.equals(this.missing, builder.missing) + return (Objects.equals(this.fieldName, builder.fieldName) && Objects.equals(this.missing, builder.missing) && Objects.equals(this.order, builder.order) && Objects.equals(this.sortMode, builder.sortMode) && Objects.equals(this.unmappedType, builder.unmappedType) && Objects.equals(this.nestedSort, builder.nestedSort)) && Objects.equals(this.numericType, builder.numericType); @@ -449,7 +406,7 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(this.fieldName, this.nestedFilter, this.nestedPath, this.nestedSort, this.missing, this.order, this.sortMode, + return Objects.hash(this.fieldName, this.nestedSort, this.missing, this.order, this.sortMode, this.unmappedType, this.numericType); } @@ -475,38 +432,22 @@ public static FieldSortBuilder fromXContent(XContentParser parser, String fieldN static { PARSER.declareField(FieldSortBuilder::missing, p -> p.objectText(), MISSING, ValueType.VALUE); - PARSER.declareString((fieldSortBuilder, nestedPath) -> { - deprecationLogger.deprecated("[nested_path] has been deprecated in favor of the [nested] parameter"); - fieldSortBuilder.setNestedPath(nestedPath); - }, NESTED_PATH_FIELD); PARSER.declareString(FieldSortBuilder::unmappedType , UNMAPPED_TYPE); PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)) , ORDER_FIELD); PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORT_MODE); - PARSER.declareObject(FieldSortBuilder::setNestedFilter, (p, c) -> { - deprecationLogger.deprecated("[nested_filter] has been deprecated in favour for the [nested] parameter"); - return SortBuilder.parseNestedFilter(p); - }, NESTED_FILTER_FIELD); PARSER.declareObject(FieldSortBuilder::setNestedSort, (p, c) -> NestedSortBuilder.fromXContent(p), NESTED_FIELD); 
PARSER.declareString((b, v) -> b.setNumericType(v), NUMERIC_TYPE); } @Override public FieldSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { - if (nestedFilter == null && nestedSort == null) { + if (nestedSort == null) { return this; } - if (nestedFilter != null) { - QueryBuilder rewrite = nestedFilter.rewrite(ctx); - if (nestedFilter == rewrite) { - return this; - } - return new FieldSortBuilder(this).setNestedFilter(rewrite); - } else { - NestedSortBuilder rewrite = nestedSort.rewrite(ctx); - if (nestedSort == rewrite) { - return this; - } - return new FieldSortBuilder(this).setNestedSort(rewrite); + NestedSortBuilder rewrite = nestedSort.rewrite(ctx); + if (nestedSort == rewrite) { + return this; } + return new FieldSortBuilder(this).setNestedSort(rewrite); } } diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 73877b3faa309..630b93b4f34b5 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.sort; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -28,6 +27,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; @@ -35,7 +35,6 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -64,14 +63,13 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +import static org.elasticsearch.search.sort.FieldSortBuilder.validateMissingNestedPath; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** * A geo distance based sorting on a geo point like field. */ public class GeoDistanceSortBuilder extends SortBuilder { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(GeoDistanceSortBuilder.class)); public static final String NAME = "_geo_distance"; public static final String ALTERNATIVE_NAME = "_geoDistance"; @@ -90,8 +88,6 @@ public class GeoDistanceSortBuilder extends SortBuilder private DistanceUnit unit = DistanceUnit.DEFAULT; private SortMode sortMode = null; - private QueryBuilder nestedFilter; - private String nestedPath; private NestedSortBuilder nestedSort; @@ -150,8 +146,6 @@ public GeoDistanceSortBuilder(String fieldName, String ... 
geohashes) { this.unit = original.unit; this.order = original.order; this.sortMode = original.sortMode; - this.nestedFilter = original.nestedFilter; - this.nestedPath = original.nestedPath; this.validation = original.validation; this.nestedSort = original.nestedSort; this.ignoreUnmapped = original.ignoreUnmapped; @@ -168,8 +162,13 @@ public GeoDistanceSortBuilder(StreamInput in) throws IOException { unit = DistanceUnit.readFromStream(in); order = SortOrder.readFromStream(in); sortMode = in.readOptionalWriteable(SortMode::readFromStream); - nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); - nestedPath = in.readOptionalString(); + if (in.getVersion().before(Version.V_8_0_0)) { + if (in.readOptionalNamedWriteable(QueryBuilder.class) != null || in.readOptionalString() != null) { + throw new IOException("the [sort] options [nested_path] and [nested_filter] are removed in 8.x, " + + "please use [nested] instead"); + } + + } nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); validation = GeoValidationMethod.readFromStream(in); ignoreUnmapped = in.readBoolean(); @@ -183,8 +182,10 @@ public void writeTo(StreamOutput out) throws IOException { unit.writeTo(out); order.writeTo(out); out.writeOptionalWriteable(sortMode); - out.writeOptionalNamedWriteable(nestedFilter); - out.writeOptionalString(nestedPath); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeOptionalNamedWriteable(null); + out.writeOptionalString(null); + } out.writeOptionalWriteable(nestedSort); validation.writeTo(out); out.writeBoolean(ignoreUnmapped); @@ -288,59 +289,6 @@ public SortMode sortMode() { return this.sortMode; } - /** - * Sets the nested filter that the nested objects should match with in order to - * be taken into account for sorting. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} - * and retrieve with {@link #getNestedSort()} - **/ - @Deprecated - public GeoDistanceSortBuilder setNestedFilter(QueryBuilder nestedFilter) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedFilter = nestedFilter; - return this; - } - - /** - * Returns the nested filter that the nested objects should match with in order to be taken into account - * for sorting. - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} - * and retrieve with {@link #getNestedSort()} - **/ - @Deprecated - public QueryBuilder getNestedFilter() { - return this.nestedFilter; - } - - /** - * Sets the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a - * field inside a nested object, the nearest upper nested object is selected as nested path. - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} - * and retrieve with {@link #getNestedSort()} - **/ - @Deprecated - public GeoDistanceSortBuilder setNestedPath(String nestedPath) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedPath = nestedPath; - return this; - } - - /** - * Returns the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a - * field inside a nested object, the nearest upper nested object is selected as nested path. 
- * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} - * and retrieve with {@link #getNestedSort()} - **/ - @Deprecated - public String getNestedPath() { - return this.nestedPath; - } - /** * Returns the {@link NestedSortBuilder} */ @@ -355,9 +303,6 @@ public NestedSortBuilder getNestedSort() { * order to be taken into account for sorting. */ public GeoDistanceSortBuilder setNestedSort(final NestedSortBuilder nestedSort) { - if (this.nestedFilter != null || this.nestedPath != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } this.nestedSort = nestedSort; return this; } @@ -393,12 +338,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(SORTMODE_FIELD.getPreferredName(), sortMode); } - if (nestedPath != null) { - builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath); - } - if (nestedFilter != null) { - builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params); - } if (nestedSort != null) { builder.field(NESTED_FIELD.getPreferredName(), nestedSort); } @@ -432,8 +371,6 @@ public boolean equals(Object object) { Objects.equals(unit, other.unit) && Objects.equals(sortMode, other.sortMode) && Objects.equals(order, other.order) && - Objects.equals(nestedFilter, other.nestedFilter) && - Objects.equals(nestedPath, other.nestedPath) && Objects.equals(validation, other.validation) && Objects.equals(nestedSort, other.nestedSort) && ignoreUnmapped == other.ignoreUnmapped; @@ -442,8 +379,7 @@ public boolean equals(Object object) { @Override public int hashCode() { return Objects.hash(this.fieldName, this.points, this.geoDistance, - this.unit, this.sortMode, this.order, this.nestedFilter, - this.nestedPath, this.validation, this.nestedSort, this.ignoreUnmapped); + this.unit, this.sortMode, this.order, this.validation, this.nestedSort, this.ignoreUnmapped); } /** @@ -463,8 +399,6 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String GeoDistance geoDistance = GeoDistance.ARC; SortOrder order = SortOrder.ASC; SortMode sortMode = null; - QueryBuilder nestedFilter = null; - String nestedPath = null; NestedSortBuilder nestedSort = null; GeoValidationMethod validation = null; boolean ignoreUnmapped = false; @@ -479,10 +413,7 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String fieldName = currentName; } else if (token == XContentParser.Token.START_OBJECT) { - if (NESTED_FILTER_FIELD.match(currentName, parser.getDeprecationHandler())) { - deprecationLogger.deprecated("[nested_filter] has been deprecated in favour of the [nested] parameter"); - nestedFilter = parseInnerQueryBuilder(parser); - } else if (NESTED_FIELD.match(currentName, parser.getDeprecationHandler())) { + if (NESTED_FIELD.match(currentName, parser.getDeprecationHandler())) { nestedSort = NestedSortBuilder.fromXContent(parser); } else { // the json in the format of -> field : { lat : 30, lon : 12 } @@ -509,9 +440,6 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String validation = GeoValidationMethod.fromString(parser.text()); } else if (SORTMODE_FIELD.match(currentName, parser.getDeprecationHandler())) { sortMode = SortMode.fromString(parser.text()); - } else if (NESTED_PATH_FIELD.match(currentName, parser.getDeprecationHandler())) { - deprecationLogger.deprecated("[nested_path] has been deprecated in favour of the [nested] parameter"); - nestedPath = parser.text(); } else if 
(IGNORE_UNMAPPED.match(currentName, parser.getDeprecationHandler())) { ignoreUnmapped = parser.booleanValue(); } else if (token == Token.VALUE_STRING){ @@ -549,10 +477,6 @@ public static GeoDistanceSortBuilder fromXContent(XContentParser parser, String if (sortMode != null) { result.sortMode(sortMode); } - if (nestedFilter != null) { - result.setNestedFilter(nestedFilter); - } - result.setNestedPath(nestedPath); if (nestedSort != null) { result.setNestedSort(nestedSort); } @@ -610,16 +534,15 @@ public SortFieldAndFormat build(QueryShardContext context) throws IOException { } final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType); - final Nested nested; + Nested nested = null; if (nestedSort != null) { if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { throw new QueryShardException(context, "max_children is only supported on last level of nested sort"); } - // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { - nested = resolveNested(context, nestedPath, nestedFilter); + validateMissingNestedPath(context, fieldName); } if (geoIndexFieldData.getClass() == LatLonPointDVIndexFieldData.class // only works with 5.x geo_point @@ -698,21 +621,13 @@ static void parseGeoPoints(XContentParser parser, List geoPoints) thro @Override public GeoDistanceSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { - if (nestedFilter == null && nestedSort == null) { + if (nestedSort == null) { return this; } - if (nestedFilter != null) { - QueryBuilder rewrite = nestedFilter.rewrite(ctx); - if (nestedFilter == rewrite) { - return this; - } - return new GeoDistanceSortBuilder(this).setNestedFilter(rewrite); - } else { - NestedSortBuilder rewrite = nestedSort.rewrite(ctx); - if (nestedSort == rewrite) { - return this; - } - return new GeoDistanceSortBuilder(this).setNestedSort(rewrite); + NestedSortBuilder rewrite = nestedSort.rewrite(ctx); + if (nestedSort == rewrite) { + return this; } + return new GeoDistanceSortBuilder(this).setNestedSort(rewrite); } } diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 21de011e276b7..17fed4d9ac19c 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -19,18 +19,17 @@ package org.elasticsearch.search.sort; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -65,7 +64,6 @@ * Script sort builder allows to sort based on a custom script expression. 
*/ public class ScriptSortBuilder extends SortBuilder { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(ScriptSortBuilder.class)); public static final String NAME = "_script"; public static final ParseField TYPE_FIELD = new ParseField("type"); @@ -78,10 +76,6 @@ public class ScriptSortBuilder extends SortBuilder { private SortMode sortMode; - private QueryBuilder nestedFilter; - - private String nestedPath; - private NestedSortBuilder nestedSort; /** @@ -105,8 +99,6 @@ public ScriptSortBuilder(Script script, ScriptSortType type) { this.type = original.type; this.order = original.order; this.sortMode = original.sortMode; - this.nestedFilter = original.nestedFilter; - this.nestedPath = original.nestedPath; this.nestedSort = original.nestedSort; } @@ -118,8 +110,12 @@ public ScriptSortBuilder(StreamInput in) throws IOException { type = ScriptSortType.readFromStream(in); order = SortOrder.readFromStream(in); sortMode = in.readOptionalWriteable(SortMode::readFromStream); - nestedPath = in.readOptionalString(); - nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); + if (in.getVersion().before(Version.V_8_0_0)) { + if (in.readOptionalNamedWriteable(QueryBuilder.class) != null || in.readOptionalString() != null) { + throw new IOException("the [sort] options [nested_path] and [nested_filter] are removed in 8.x, " + + "please use [nested] instead"); + } + } nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); } @@ -129,8 +125,10 @@ public void writeTo(StreamOutput out) throws IOException { type.writeTo(out); order.writeTo(out); out.writeOptionalWriteable(sortMode); - out.writeOptionalString(nestedPath); - out.writeOptionalNamedWriteable(nestedFilter); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeOptionalString(null); + out.writeOptionalNamedWriteable(null); + } out.writeOptionalWriteable(nestedSort); } @@ -169,56 +167,6 @@ public SortMode sortMode() { return this.sortMode; } - /** - * Sets the nested filter that the nested objects should match with in order to be taken into account - * for sorting. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public ScriptSortBuilder setNestedFilter(QueryBuilder nestedFilter) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedFilter = nestedFilter; - return this; - } - - /** - * Gets the nested filter. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public QueryBuilder getNestedFilter() { - return this.nestedFilter; - } - - /** - * Sets the nested path if sorting occurs on a field that is inside a nested object. For sorting by script this - * needs to be specified. - * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public ScriptSortBuilder setNestedPath(String nestedPath) { - if (this.nestedSort != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } - this.nestedPath = nestedPath; - return this; - } - - /** - * Gets the nested path. 
- * - * @deprecated set nested sort with {@link #setNestedSort(NestedSortBuilder)} and retrieve with {@link #getNestedSort()} - */ - @Deprecated - public String getNestedPath() { - return this.nestedPath; - } - /** * Returns the {@link NestedSortBuilder} */ @@ -233,9 +181,6 @@ public NestedSortBuilder getNestedSort() { * order to be taken into account for sorting. */ public ScriptSortBuilder setNestedSort(final NestedSortBuilder nestedSort) { - if (this.nestedFilter != null || this.nestedPath != null) { - throw new IllegalArgumentException("Setting both nested_path/nested_filter and nested not allowed"); - } this.nestedSort = nestedSort; return this; } @@ -250,12 +195,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) if (sortMode != null) { builder.field(SORTMODE_FIELD.getPreferredName(), sortMode); } - if (nestedPath != null) { - builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath); - } - if (nestedFilter != null) { - builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, builderParams); - } if (nestedSort != null) { builder.field(NESTED_FIELD.getPreferredName(), nestedSort); } @@ -273,14 +212,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) PARSER.declareField(constructorArg(), p -> ScriptSortType.fromString(p.text()), TYPE_FIELD, ValueType.STRING); PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)), ORDER_FIELD); PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORTMODE_FIELD); - PARSER.declareString((fieldSortBuilder, nestedPath) -> { - deprecationLogger.deprecated("[nested_path] has been deprecated in favor of the [nested] parameter"); - fieldSortBuilder.setNestedPath(nestedPath); - }, NESTED_PATH_FIELD); - PARSER.declareObject(ScriptSortBuilder::setNestedFilter, (p, c) -> { - deprecationLogger.deprecated("[nested_filter] has been deprecated in favour for the [nested] parameter"); - return SortBuilder.parseNestedFilter(p); - }, NESTED_FILTER_FIELD); PARSER.declareObject(ScriptSortBuilder::setNestedSort, (p, c) -> NestedSortBuilder.fromXContent(p), NESTED_FIELD); } @@ -309,16 +240,13 @@ public SortFieldAndFormat build(QueryShardContext context) throws IOException { valueMode = reverse ? 
MultiValueMode.MAX : MultiValueMode.MIN; } - final Nested nested; + Nested nested = null; if (nestedSort != null) { if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { throw new QueryShardException(context, "max_children is only supported on last level of nested sort"); } - // new nested sorts takes priority nested = resolveNested(context, nestedSort); - } else { - nested = resolveNested(context, nestedPath, nestedFilter); } final IndexFieldData.XFieldComparatorSource fieldComparatorSource; @@ -399,14 +327,12 @@ public boolean equals(Object object) { Objects.equals(type, other.type) && Objects.equals(order, other.order) && Objects.equals(sortMode, other.sortMode) && - Objects.equals(nestedFilter, other.nestedFilter) && - Objects.equals(nestedPath, other.nestedPath) && Objects.equals(nestedSort, other.nestedSort); } @Override public int hashCode() { - return Objects.hash(script, type, order, sortMode, nestedFilter, nestedPath, nestedSort); + return Objects.hash(script, type, order, sortMode, nestedSort); } @Override @@ -452,21 +378,13 @@ public String toString() { @Override public ScriptSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { - if (nestedFilter == null && nestedSort == null) { + if (nestedSort == null) { return this; } - if (nestedFilter != null) { - QueryBuilder rewrite = nestedFilter.rewrite(ctx); - if (nestedFilter == rewrite) { - return this; - } - return new ScriptSortBuilder(this).setNestedFilter(rewrite); - } else { - NestedSortBuilder rewrite = nestedSort.rewrite(ctx); - if (nestedSort == rewrite) { - return this; - } - return new ScriptSortBuilder(this).setNestedSort(rewrite); + NestedSortBuilder rewrite = nestedSort.rewrite(ctx); + if (nestedSort == rewrite) { + return this; } + return new ScriptSortBuilder(this).setNestedSort(rewrite); } } diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 2060dde506907..881df666802d7 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -46,6 +46,7 @@ import java.util.Optional; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +import static org.elasticsearch.search.sort.NestedSortBuilder.FILTER_FIELD; public abstract class SortBuilder> implements NamedWriteable, ToXContentObject, Rewriteable> { @@ -53,8 +54,6 @@ public abstract class SortBuilder> implements NamedWrit // parse fields common to more than one SortBuilder public static final ParseField ORDER_FIELD = new ParseField("order"); - public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter"); - public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path"); private static final Map> PARSERS = Map.of( ScriptSortBuilder.NAME, ScriptSortBuilder::fromXContent, @@ -171,12 +170,6 @@ public static Optional buildSort(List> sortBuilde return Optional.empty(); } - protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder nestedFilter) throws IOException { - NestedSortBuilder nestedSortBuilder = new NestedSortBuilder(nestedPath); - nestedSortBuilder.setFilter(nestedFilter); - return resolveNested(context, nestedSortBuilder); - } - protected static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort) throws IOException { final Query childQuery = resolveNestedQuery(context, nestedSort, 
null); if (childQuery == null) { @@ -207,7 +200,7 @@ private static Query resolveNestedQuery(QueryShardContext context, NestedSortBui if (nestedObjectMapper == null) { throw new QueryShardException(context, "[nested] failed to find nested object under path [" + nestedPath + "]"); } - if (!nestedObjectMapper.nested().isNested()) { + if (nestedObjectMapper.nested().isNested() == false) { throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type"); } ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); @@ -256,7 +249,7 @@ protected static QueryBuilder parseNestedFilter(XContentParser parser) { try { return parseInnerQueryBuilder(parser); } catch (Exception e) { - throw new ParsingException(parser.getTokenLocation(), "Expected " + NESTED_FILTER_FIELD.getPreferredName() + " element.", e); + throw new ParsingException(parser.getTokenLocation(), "Expected " + FILTER_FIELD.getPreferredName() + " element.", e); } } diff --git a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 1937bb2a847fd..d80fa2c1e4cca 100644 --- a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -385,7 +385,8 @@ public void testSimpleNestedSorting() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedPath("nested1")) + .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC) + .setNestedSort(new NestedSortBuilder("nested1"))) .get(); assertHitCount(searchResponse, 3); @@ -399,7 +400,8 @@ public void testSimpleNestedSorting() throws Exception { searchResponse = client().prepareSearch("test") .setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedPath("nested1")) + .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC) + .setNestedSort(new NestedSortBuilder("nested1"))) .get(); assertHitCount(searchResponse, 3); @@ -476,8 +478,10 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test") .setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").setNestedPath("nested1") - .setNestedFilter(termQuery("nested1.field2", true)).missing(10).order(SortOrder.ASC)); + .addSort(SortBuilders.fieldSort("nested1.field1") + .setNestedSort(new NestedSortBuilder("nested1") + .setFilter(termQuery("nested1.field2", true))) + .missing(10).order(SortOrder.ASC)); if (randomBoolean()) { searchRequestBuilder.setScroll("10m"); @@ -494,8 +498,10 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10")); searchRequestBuilder = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested1.field1").setNestedPath("nested1") - .setNestedFilter(termQuery("nested1.field2", true)).missing(10).order(SortOrder.DESC)); + .addSort(SortBuilders.fieldSort("nested1.field1") + .setNestedSort(new NestedSortBuilder("nested1") + .setFilter(termQuery("nested1.field2", true))) + .missing(10).order(SortOrder.DESC)); if (randomBoolean()) { 
searchRequestBuilder.setScroll("10m"); @@ -953,7 +959,7 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") + .setNestedSort(new NestedSortBuilder("parent.child")) .order(SortOrder.ASC) ) .get(); @@ -967,12 +973,13 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("-1")); // With nested filter + NestedSortBuilder nestedSort = new NestedSortBuilder("parent.child"); + nestedSort.setFilter(QueryBuilders.termQuery("parent.child.filter", true)); searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) + .setNestedSort(nestedSort) .order(SortOrder.ASC) ) .get(); @@ -990,8 +997,7 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) + .setNestedSort(nestedSort) .order(SortOrder.ASC) ) .get(); @@ -1005,12 +1011,12 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + nestedSort.setFilter(QueryBuilders.termQuery("parent.filter", false)); searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.parent_values") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.filter", false)) + .setNestedSort(nestedSort) .order(SortOrder.ASC) ) .get(); @@ -1051,8 +1057,8 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_obj.value") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) + .setNestedSort(new NestedSortBuilder("parent.child") + .setFilter(QueryBuilders.termQuery("parent.child.filter", true))) .order(SortOrder.ASC) ) .get(); @@ -1071,7 +1077,7 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") + .setNestedSort(new NestedSortBuilder("parent.child")) .sortMode(SortMode.SUM) .order(SortOrder.ASC) ) @@ -1091,7 +1097,7 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") + .setNestedSort(new NestedSortBuilder("parent.child")) .sortMode(SortMode.SUM) .order(SortOrder.DESC) ) @@ -1111,8 +1117,9 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) + .setNestedSort(new NestedSortBuilder("parent.child") + .setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) .sortMode(SortMode.SUM) .order(SortOrder.ASC) ) @@ -1132,7 +1139,7 @@ public void testSortNestedWithNestedFilter() throws Exception { 
.setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") + .setNestedSort(new NestedSortBuilder("parent.child")) .sortMode(SortMode.AVG) .order(SortOrder.ASC) ) @@ -1151,7 +1158,7 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") + .setNestedSort(new NestedSortBuilder("parent.child")) .sortMode(SortMode.AVG) .order(SortOrder.DESC) ) @@ -1171,8 +1178,9 @@ public void testSortNestedWithNestedFilter() throws Exception { .setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") - .setNestedPath("parent.child") - .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) + .setNestedSort(new NestedSortBuilder("parent.child") + .setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) .sortMode(SortMode.AVG) .order(SortOrder.ASC) ) @@ -1307,13 +1315,15 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") .addSort(SortBuilders.fieldSort("users.first") - .setNestedPath("users") + .setNestedSort(new NestedSortBuilder("users")) .order(SortOrder.ASC)) .addSort(SortBuilders.fieldSort("users.first") .order(SortOrder.ASC) - .setNestedPath("users") - .setNestedFilter(nestedQuery("users.workstations", termQuery("users.workstations.stationid", "s5"), - ScoreMode.Avg))) + .setNestedSort(new NestedSortBuilder("users") + .setFilter(nestedQuery("users.workstations", termQuery("users.workstations.stationid", "s5"), + ScoreMode.Avg)) + ) + ) .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 2); diff --git a/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 04349e846c78f..fb42d244f78ff 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -163,9 +164,13 @@ private TestContext create(SearchType... searchTypes) throws Exception { } } else { if (randomBoolean()) { - sort = SortBuilders.fieldSort("nested.field3").missing(1); + sort = SortBuilders.fieldSort("nested.field3") + .setNestedSort(new NestedSortBuilder("nested")) + .missing(1); } else { - sort = SortBuilders.fieldSort("nested.field4").missing("1"); + sort = SortBuilders.fieldSort("nested.field4") + .setNestedSort(new NestedSortBuilder("nested")) + .missing("1"); } } sort.order(randomBoolean() ? 
SortOrder.ASC : SortOrder.DESC); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index ee9c8f8ed1105..1623bc2bb378d 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -45,7 +45,6 @@ import org.elasticsearch.search.MultiValueMode; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -90,17 +89,7 @@ public FieldSortBuilder randomFieldSortBuilder() { builder.sortMode(randomFrom(SortMode.values())); } if (randomBoolean()) { - if (randomBoolean()) { - builder.setNestedSort(createRandomNestedSort(3)); - } else { - // the following are alternative ways to setNestedSort for nested sorting - if (randomBoolean()) { - builder.setNestedFilter(randomNestedFilter()); - } - if (randomBoolean()) { - builder.setNestedPath(randomAlphaOfLengthBetween(1, 10)); - } - } + builder.setNestedSort(createRandomNestedSort(3)); } if (randomBoolean()) { builder.setNumericType(randomFrom(random(), "long", "double", "date", "date_nanos")); @@ -114,16 +103,8 @@ protected FieldSortBuilder mutate(FieldSortBuilder original) throws IOException int parameter = randomIntBetween(0, 5); switch (parameter) { case 0: - if (original.getNestedPath() == null && original.getNestedFilter() == null) { - mutated.setNestedSort( - randomValueOtherThan(original.getNestedSort(), () -> NestedSortBuilderTests.createRandomNestedSort(3))); - } else { - if (randomBoolean()) { - mutated.setNestedPath(randomValueOtherThan(original.getNestedPath(), () -> randomAlphaOfLengthBetween(1, 10))); - } else { - mutated.setNestedFilter(randomValueOtherThan(original.getNestedFilter(), () -> randomNestedFilter())); - } - } + mutated.setNestedSort( + randomValueOtherThan(original.getNestedSort(), () -> NestedSortBuilderTests.createRandomNestedSort(3))); break; case 1: mutated.sortMode(randomValueOtherThan(original.sortMode(), () -> randomFrom(SortMode.values()))); @@ -285,7 +266,8 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new TermQuery(new Term(MAPPED_STRING_FIELDNAME, "value")), nested.getInnerQuery()); - sortBuilder = new FieldSortBuilder("fieldName").setNestedPath("path"); + NestedSortBuilder nestedSort = new NestedSortBuilder("path"); + sortBuilder = new FieldSortBuilder("fieldName").setNestedSort(nestedSort); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); @@ -293,19 +275,14 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new TermQuery(new Term(TypeFieldMapper.NAME, "__path")), nested.getInnerQuery()); - sortBuilder = new FieldSortBuilder("fieldName").setNestedPath("path") - .setNestedFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")); + nestedSort.setFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")); + sortBuilder = new FieldSortBuilder("fieldName").setNestedSort(nestedSort); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); assertNotNull(nested); assertEquals(new 
TermQuery(new Term(MAPPED_STRING_FIELDNAME, "value")), nested.getInnerQuery()); - - // if nested path is missing, we omit any filter and return a SortedNumericSortField - sortBuilder = new FieldSortBuilder("fieldName").setNestedFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")); - sortField = sortBuilder.build(shardContextMock).field; - assertThat(sortField, instanceOf(SortedNumericSortField.class)); } public void testUnknownOptionFails() throws IOException { @@ -364,22 +341,6 @@ public void testModeNonNumericField() throws IOException { assertEquals(expectedError, e.getMessage()); } - /** - * Test we can either set nested sort via path/filter or via nested sort builder, not both - */ - public void testNestedSortBothThrows() throws IOException { - FieldSortBuilder sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedPath("nestedPath").setNestedSort(new NestedSortBuilder("otherPath"))); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedPath("nestedPath")); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedFilter(QueryBuilders.matchAllQuery())); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - } - /** * Test the nested Filter gets rewritten */ @@ -391,10 +352,12 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx return new MatchNoneQueryBuilder(); } }; - sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery); + NestedSortBuilder nestedSort = new NestedSortBuilder("path"); + nestedSort.setFilter(rangeQuery); + sortBuilder.setNestedSort(nestedSort); FieldSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); - assertNotSame(rangeQuery, rewritten.getNestedFilter()); + assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } /** @@ -414,20 +377,6 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } - @Override - protected void assertWarnings(FieldSortBuilder testItem) { - List expectedWarnings = new ArrayList<>(); - if (testItem.getNestedFilter() != null) { - expectedWarnings.add("[nested_filter] has been deprecated in favour for the [nested] parameter"); - } - if (testItem.getNestedPath() != null) { - expectedWarnings.add("[nested_path] has been deprecated in favor of the [nested] parameter"); - } - if (expectedWarnings.isEmpty() == false) { - assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()])); - } - } - @Override protected FieldSortBuilder fromXContent(XContentParser parser, String fieldName) throws IOException { return FieldSortBuilder.fromXContent(parser, fieldName); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index d3f21867ab1d1..400dc30afa11f 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -1443,9 +1443,11 @@ public void testNestedSort() throws 
IOException, InterruptedException, Execution refresh(); // We sort on nested field + SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested.foo").setNestedPath("nested").order(SortOrder.DESC)) + .addSort(SortBuilders.fieldSort("nested.foo") + .setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)) .get(); assertNoFailures(searchResponse); SearchHit[] hits = searchResponse.getHits().getHits(); @@ -1474,7 +1476,8 @@ public void testNestedSort() throws IOException, InterruptedException, Execution // We sort on nested sub field searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("nested.foo.sub").setNestedPath("nested").order(SortOrder.DESC)) + .addSort(SortBuilders.fieldSort("nested.foo.sub") + .setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)) .get(); assertNoFailures(searchResponse); hits = searchResponse.getHits().getHits(); @@ -1483,6 +1486,14 @@ public void testNestedSort() throws IOException, InterruptedException, Execution assertThat(hits[1].getSortValues().length, is(1)); assertThat(hits[0].getSortValues()[0], is("cba bca")); assertThat(hits[1].getSortValues()[0], is("bar bar")); + + // missing nested path + SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, + () -> client().prepareSearch() + .setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("nested.foo")) + .get() + ); + assertThat(exc.toString(), containsString("it is mandatory to set the [nested] context")); } public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 87d84484aca0a..f3899785c499c 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -272,7 +272,8 @@ public void testDistanceSortingNestedFields() throws Exception { // Order: Asc SearchResponse searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders - .geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.ASC).setNestedPath("branches")) + .geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.ASC) + .setNestedSort(new NestedSortBuilder("branches"))) .get(); assertHitCount(searchResponse, 4); @@ -285,7 +286,8 @@ public void testDistanceSortingNestedFields() throws Exception { // Order: Asc, Mode: max searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX).setNestedPath("branches")) + 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX) + .setNestedSort(new NestedSortBuilder("branches"))) .get(); assertHitCount(searchResponse, 4); @@ -297,7 +299,8 @@ public void testDistanceSortingNestedFields() throws Exception { // Order: Desc searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders - .geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.DESC).setNestedPath("branches")) + .geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.DESC) + .setNestedSort(new NestedSortBuilder("branches"))) .get(); assertHitCount(searchResponse, 4); @@ -310,7 
+313,8 @@ public void testDistanceSortingNestedFields() throws Exception { // Order: Desc, Mode: min searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN).setNestedPath("branches")) + 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN) + .setNestedSort(new NestedSortBuilder("branches"))) .get(); assertHitCount(searchResponse, 4); @@ -322,7 +326,8 @@ public void testDistanceSortingNestedFields() throws Exception { searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches")) + 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC) + .setNestedSort(new NestedSortBuilder("branches"))) .get(); assertHitCount(searchResponse, 4); @@ -334,7 +339,8 @@ public void testDistanceSortingNestedFields() throws Exception { searchResponse = client().prepareSearch("companies") .setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .setNestedPath("branches").sortMode(SortMode.AVG).order(SortOrder.DESC).setNestedPath("branches")) + .setNestedSort(new NestedSortBuilder("branches")) + .sortMode(SortMode.AVG).order(SortOrder.DESC)) .get(); assertHitCount(searchResponse, 4); @@ -346,8 +352,10 @@ public void testDistanceSortingNestedFields() throws Exception { searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .setNestedFilter(termQuery("branches.name", "brooklyn")) - .sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches")) + .setNestedSort(new NestedSortBuilder("branches") + .setFilter(termQuery("branches.name", "brooklyn")) + ) + .sortMode(SortMode.AVG).order(SortOrder.ASC)) .get(); assertHitCount(searchResponse, 4); assertFirstHit(searchResponse, hasId("4")); @@ -360,7 +368,7 @@ public void testDistanceSortingNestedFields() throws Exception { try { client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode(SortMode.SUM) - .setNestedPath("branches")); + .setNestedSort(new NestedSortBuilder("branches"))); fail("Sum should not be allowed as sort mode"); } catch (IllegalArgumentException e) { //expected diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index c90e6ef0dde39..73be5837faa90 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -50,9 +50,7 @@ import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.instanceOf; @@ -106,20 +104,10 @@ public static GeoDistanceSortBuilder randomGeoDistanceSortBuilder() { result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); } if (randomBoolean()) { - if (randomBoolean()) { - // don't fully randomize here, 
GeoDistanceSort is picky about the filters that are allowed - NestedSortBuilder nestedSort = new NestedSortBuilder(randomAlphaOfLengthBetween(3, 10)); - nestedSort.setFilter(new MatchAllQueryBuilder()); - result.setNestedSort(nestedSort); - } else { - // the following are alternative ways to setNestedSort for nested sorting - if (randomBoolean()) { - result.setNestedFilter(new MatchAllQueryBuilder()); - } - if (randomBoolean()) { - result.setNestedPath(randomAlphaOfLengthBetween(1, 10)); - } - } + // don't fully randomize here, GeoDistanceSort is picky about the filters that are allowed + NestedSortBuilder nestedSort = new NestedSortBuilder(randomAlphaOfLengthBetween(3, 10)); + nestedSort.setFilter(new MatchAllQueryBuilder()); + result.setNestedSort(nestedSort); } if (randomBoolean()) { result.ignoreUnmapped(result.ignoreUnmapped() == false); @@ -183,16 +171,8 @@ protected GeoDistanceSortBuilder mutate(GeoDistanceSortBuilder original) throws () -> randomFrom(SortMode.values()))); break; case 6: - if (original.getNestedPath() == null && original.getNestedFilter() == null) { - result.setNestedSort( - randomValueOtherThan(original.getNestedSort(), () -> NestedSortBuilderTests.createRandomNestedSort(3))); - } else { - if (randomBoolean()) { - result.setNestedPath(randomValueOtherThan(original.getNestedPath(), () -> randomAlphaOfLengthBetween(1, 10))); - } else { - result.setNestedFilter(randomValueOtherThan(original.getNestedFilter(), () -> randomNestedFilter())); - } - } + result.setNestedSort( + randomValueOtherThan(original.getNestedSort(), () -> NestedSortBuilderTests.createRandomNestedSort(3))); break; case 7: result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); @@ -387,21 +367,6 @@ private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Excepti } } - @Override - protected void assertWarnings(GeoDistanceSortBuilder testItem) { - List expectedWarnings = new ArrayList<>(); - if (testItem.getNestedFilter() != null) { - expectedWarnings.add("[nested_filter] has been deprecated in favour of the [nested] parameter"); - } - if (testItem.getNestedPath() != null) { - expectedWarnings.add("[nested_path] has been deprecated in favour of the [nested] parameter"); - } - if (expectedWarnings.isEmpty() == false) { - assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()])); - } - } - - @Override protected GeoDistanceSortBuilder fromXContent(XContentParser parser, String fieldName) throws IOException { return GeoDistanceSortBuilder.fromXContent(parser, fieldName); @@ -433,7 +398,7 @@ public void testCommonCaseIsOptimized() throws IOException { assertEquals(SortField.class, sort.field.getClass()); // descending means the max value should be considered rather than min builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1)); - builder.setNestedPath("some_nested_path"); + builder.setNestedSort(new NestedSortBuilder("some_nested_path")); sort = builder.build(context); assertEquals(SortField.class, sort.field.getClass()); // can't use LatLon optimized sorting with nested fields @@ -523,7 +488,8 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new MatchAllDocsQuery(), nested.getInnerQuery()); - sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedPath("path"); + sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0) + .setNestedSort(new NestedSortBuilder("path")); sortField = sortBuilder.build(shardContextMock).field; 
assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); @@ -531,20 +497,16 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new TermQuery(new Term(TypeFieldMapper.NAME, "__path")), nested.getInnerQuery()); - sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedPath("path") - .setNestedFilter(QueryBuilders.matchAllQuery()); + sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0) + .setNestedSort(new NestedSortBuilder("path") + .setFilter(QueryBuilders.matchAllQuery()) + ); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); assertNotNull(nested); assertEquals(new MatchAllDocsQuery(), nested.getInnerQuery()); - - // if nested path is missing, we omit any filter and return a regular SortField - // (LatLonSortField) - sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedFilter(QueryBuilders.termQuery("fieldName", "value")); - sortField = sortBuilder.build(shardContextMock).field; - assertThat(sortField, instanceOf(SortField.class)); } /** @@ -579,22 +541,6 @@ public void testBuildInvalidPoints() throws IOException { } } - /** - * Test we can either set nested sort via path/filter or via nested sort builder, not both - */ - public void testNestedSortBothThrows() throws IOException { - GeoDistanceSortBuilder sortBuilder = new GeoDistanceSortBuilder("fieldName", 0.0, 0.0); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedPath("nestedPath").setNestedSort(new NestedSortBuilder("otherPath"))); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedPath("nestedPath")); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedFilter(QueryBuilders.matchAllQuery())); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - } - /** * Test the nested Filter gets rewritten */ @@ -606,10 +552,12 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx return new MatchNoneQueryBuilder(); } }; - sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery); - GeoDistanceSortBuilder rewritten = (GeoDistanceSortBuilder) sortBuilder - .rewrite(createMockShardContext()); - assertNotSame(rangeQuery, rewritten.getNestedFilter()); + sortBuilder.setNestedSort( + new NestedSortBuilder("path") + .setFilter(rangeQuery) + ); + GeoDistanceSortBuilder rewritten = sortBuilder.rewrite(createMockShardContext()); + assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } /** diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 3017a2e0c067d..1d3b71bbf40db 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java 
@@ -324,7 +324,8 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new MatchAllDocsQuery(), nested.getInnerQuery()); - sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER).setNestedPath("path"); + sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER) + .setNestedSort(new NestedSortBuilder("path")); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); @@ -332,40 +333,17 @@ public void testBuildNested() throws IOException { assertNotNull(nested); assertEquals(new TermQuery(new Term(TypeFieldMapper.NAME, "__path")), nested.getInnerQuery()); - sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER).setNestedPath("path") - .setNestedFilter(QueryBuilders.matchAllQuery()); + sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER) + .setNestedSort(new NestedSortBuilder("path") + .setFilter(QueryBuilders.matchAllQuery())); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); assertNotNull(nested); assertEquals(new MatchAllDocsQuery(), nested.getInnerQuery()); - - // if nested path is missing, we omit nested element in the comparator - sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER) - .setNestedFilter(QueryBuilders.matchAllQuery()); - sortField = sortBuilder.build(shardContextMock).field; - assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); - comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); - assertNull(comparatorSource.nested()); } - /** - * Test we can either set nested sort via path/filter or via nested sort builder, not both - */ - public void testNestedSortBothThrows() throws IOException { - ScriptSortBuilder sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedPath("nestedPath").setNestedSort(new NestedSortBuilder("otherPath"))); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedPath("nestedPath")); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - iae = expectThrows(IllegalArgumentException.class, - () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedFilter(QueryBuilders.matchAllQuery())); - assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage()); - } - /** * Test the nested Filter gets rewritten */ @@ -377,10 +355,10 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx return new MatchNoneQueryBuilder(); } }; - sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery); + sortBuilder.setNestedSort(new NestedSortBuilder("path").setFilter(rangeQuery)); ScriptSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); - assertNotSame(rangeQuery, rewritten.getNestedFilter()); + 
assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } /** diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index fab089ef2cfc2..7b1e56d5c69f2 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -143,16 +143,6 @@ public void testRandomSortBuilders() throws IOException { xContentBuilder.field("sort"); } for (SortBuilder builder : testBuilders) { - if (builder instanceof GeoDistanceSortBuilder) { - GeoDistanceSortBuilder gdsb = (GeoDistanceSortBuilder) builder; - if (gdsb.getNestedFilter() != null) { - expectedWarningHeaders.add("[nested_filter] has been deprecated in favour of the [nested] parameter"); - } - if (gdsb.getNestedPath() != null) { - expectedWarningHeaders.add("[nested_path] has been deprecated in favour of the [nested] parameter"); - } - } - if (builder instanceof ScoreSortBuilder || builder instanceof FieldSortBuilder) { switch (randomIntBetween(0, 2)) { case 0: From de2abe8849d55b2d6f335eea6740ec12ab9ba08b Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 27 Jun 2019 16:37:00 +0100 Subject: [PATCH 040/140] [ML] Don't write timing stats on no-op (#43680) Similar to elastic/ml-cpp#512, if a job opens and closes and does nothing in between we shouldn't write timing stats to the results index. --- .../job/persistence/TimingStatsReporter.java | 8 ++++++++ .../output/AutodetectResultProcessor.java | 2 +- .../persistence/TimingStatsReporterTests.java | 19 +++++++++++++++++++ .../rest-api-spec/test/ml/index_layout.yml | 16 ++++++++-------- 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporter.java index 51903a1676bbe..d30335a5f06e9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporter.java @@ -42,6 +42,14 @@ public void reportBucketProcessingTime(long bucketProcessingTimeMs) { } } + public void finishReporting() { + // Don't flush if current timing stats are identical to the persisted ones + if (currentTimingStats.equals(persistedTimingStats)) { + return; + } + flush(); + } + public void flush() { persistedTimingStats = new TimingStats(currentTimingStats); bulkResultsPersister.persistTimingStats(persistedTimingStats); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java index 6016af406cf64..d2d052b1a3e6c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java @@ -134,7 +134,7 @@ public void process() { try { if (processKilled == false) { - timingStatsReporter.flush(); + timingStatsReporter.finishReporting(); bulkResultsPersister.executeRequest(); } } catch (Exception e) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporterTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporterTests.java index fb9f31f1d96f8..f2314e6de3ed8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/TimingStatsReporterTests.java @@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; public class TimingStatsReporterTests extends ESTestCase { @@ -76,6 +77,24 @@ public void testFlush() { inOrder.verifyNoMoreInteractions(); } + public void testFinishReportingNoChange() { + TimingStatsReporter reporter = new TimingStatsReporter(new TimingStats(JOB_ID), bulkResultsPersister); + + reporter.finishReporting(); + + verifyZeroInteractions(bulkResultsPersister); + } + + public void testFinishReportingWithChange() { + TimingStatsReporter reporter = new TimingStatsReporter(new TimingStats(JOB_ID), bulkResultsPersister); + + reporter.reportBucketProcessingTime(10); + + reporter.finishReporting(); + + verify(bulkResultsPersister).persistTimingStats(new TimingStats(JOB_ID, 1, 10.0, 10.0, 10.0, 10.0)); + } + public void testTimingStatsDifferSignificantly() { assertThat( TimingStatsReporter.differSignificantly( diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml index a8e4bf90d4d77..eb3a73424a601 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml @@ -124,7 +124,7 @@ setup: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser count: index: .ml-anomalies-shared - - match: {count: 8} + - match: {count: 6} - do: headers: @@ -138,7 +138,7 @@ setup: term: job_id: index-layout-job - - match: {count: 4} + - match: {count: 3} - do: headers: @@ -152,7 +152,7 @@ setup: term: job_id: index-layout-job - - match: {count: 4} + - match: {count: 3} - do: headers: @@ -166,7 +166,7 @@ setup: term: job_id: index-layout-job2 - - match: {count: 4} + - match: {count: 3} - do: headers: @@ -179,7 +179,7 @@ setup: filter: term: job_id: index-layout-job2 - - match: {count: 4} + - match: {count: 3} - do: headers: @@ -236,7 +236,7 @@ setup: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser count: index: .ml-anomalies-shared - - match: {count: 4} + - match: {count: 3} - do: @@ -251,7 +251,7 @@ setup: term: job_id: index-layout-job2 - - match: {count: 4} + - match: {count: 3} - do: headers: @@ -265,7 +265,7 @@ setup: term: job_id: index-layout-job2 - - match: {count: 4} + - match: {count: 3} - do: ml.delete_job: From 29f5c2d0ba0405680eb7dee806f9fa2c655932c7 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 27 Jun 2019 18:08:48 +0200 Subject: [PATCH 041/140] Handle situation where only voting-only nodes are bootstrapped (#43628) Adds support for the situation where only voting-only nodes are bootstrapped. In that case, they will still try to become elected and bring full master nodes into the cluster. 
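For illustration, a minimal sketch of the scenario this patch enables (the node names and the `node.voting_only` setting key are assumptions for the example; the patch refers to the setting only via `VotingOnlyNodePlugin.VOTING_ONLY_NODE_SETTING`): a fresh cluster whose bootstrap configuration lists only voting-only master-eligible nodes.

    # elasticsearch.yml on each of the three bootstrapped nodes (hypothetical names and setting key)
    node.master: true
    node.voting_only: true
    cluster.initial_master_nodes: ["node-0", "node-1", "node-2"]

Previously such a cluster could get stuck without ever electing a master; with this change a voting-only node may win the initial election (term 0) and later stands down in favour of a full master node once one has joined and caught up on the cluster state.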
--- .../coordination/CoordinationState.java | 3 +- .../AbstractCoordinatorTestCase.java | 15 +++++++-- .../coordination/VotingOnlyNodePlugin.java | 30 ++++++++++++++--- .../VotingOnlyNodeCoordinatorTests.java | 32 +++++++++++++++++-- .../VotingOnlyNodePluginTests.java | 12 +++++++ 5 files changed, 82 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java index 9df6d6e0bbea7..7aad43aaab288 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java @@ -251,7 +251,8 @@ public boolean handleJoin(Join join) { boolean added = joinVotes.addJoinVote(join); boolean prevElectionWon = electionWon; electionWon = isElectionQuorum(joinVotes); - assert !prevElectionWon || electionWon; // we cannot go from won to not won + assert !prevElectionWon || electionWon : // we cannot go from won to not won + "localNode= " + localNode + ", join=" + join + ", joinVotes=" + joinVotes; logger.debug("handleJoin: added join {} from [{}] for election, electionWon={} lastAcceptedTerm={} lastAcceptedVersion={}", join, join.getSourceNode(), electionWon, lastAcceptedTerm, getLastAcceptedVersion()); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 0547412f61500..8bdedaceba71c 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -67,6 +67,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.disruption.DisruptableMockTransport; import org.elasticsearch.test.disruption.DisruptableMockTransport.ConnectionStatus; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportService; import org.hamcrest.Matcher; import org.hamcrest.core.IsCollectionContaining; @@ -822,7 +824,8 @@ protected Optional getDisruptableMockTransport(Transpo .putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY)).build(); // suppress auto-bootstrap transportService = mockTransport.createTransportService( - settings, deterministicTaskQueue.getThreadPool(this::onNode), NOOP_TRANSPORT_INTERCEPTOR, + settings, deterministicTaskQueue.getThreadPool(this::onNode), + getTransportInterceptor(localNode, deterministicTaskQueue.getThreadPool(this::onNode)), a -> localNode, null, emptySet()); masterService = new AckedFakeThreadPoolMasterService(localNode.getId(), "test", runnable -> deterministicTaskQueue.scheduleNow(onNode(runnable))); @@ -839,7 +842,7 @@ protected Optional getDisruptableMockTransport(Transpo coordinator = new Coordinator("test_node", settings, clusterSettings, transportService, writableRegistry(), allocationService, masterService, this::getPersistedState, Cluster.this::provideSeedHosts, clusterApplierService, onJoinValidators, Randomness.get(), s -> {}, - ElectionStrategy.DEFAULT_INSTANCE); + getElectionStrategy()); masterService.setClusterStatePublisher(coordinator); final GatewayService gatewayService = new GatewayService(settings, 
allocationService, clusterService, deterministicTaskQueue.getThreadPool(this::onNode), null, coordinator); @@ -1099,6 +1102,14 @@ private List provideSeedHosts(SeedHostsProvider.HostsResolver } } + protected TransportInterceptor getTransportInterceptor(DiscoveryNode localNode, ThreadPool threadPool) { + return NOOP_TRANSPORT_INTERCEPTOR; + } + + protected ElectionStrategy getElectionStrategy() { + return ElectionStrategy.DEFAULT_INSTANCE; + } + public static final String NODE_ID_LOG_CONTEXT_KEY = "nodeId"; protected static String getNodeIdForLogContext(DiscoveryNode node) { diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java index 3142db91d3855..202cd3f095316 100644 --- a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java +++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePlugin.java @@ -50,6 +50,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import java.util.function.Supplier; public class VotingOnlyNodePlugin extends Plugin implements DiscoveryPlugin, NetworkPlugin, ActionPlugin { @@ -149,16 +150,35 @@ public boolean satisfiesAdditionalQuorumConstraints(DiscoveryNode localNode, lon if (joinVotes.nodes().stream().filter(DiscoveryNode::isMasterNode).allMatch(VotingOnlyNodePlugin::isVotingOnlyNode)) { return false; } - // if there's a vote from a full master node with same last accepted term and version, that node should become master - // instead, so we should stand down - if (joinVotes.getJoins().stream().anyMatch(join -> isFullMasterNode(join.getSourceNode()) && - join.getLastAcceptedTerm() == localAcceptedTerm && - join.getLastAcceptedVersion() == localAcceptedVersion)) { + // if there's a vote from a full master node with same state (i.e. last accepted term and version match), then that node + // should become master instead, so we should stand down. There are two exceptional cases, however: + // 1) if we are in term 0. In that case, we allow electing the voting-only node to avoid poisonous situations where only + // voting-only nodes are bootstrapped. + // 2) if there is another full master node with an older state. In that case, we ensure that + // satisfiesAdditionalQuorumConstraints cannot go from true to false when adding new joinVotes in the same election. + // As voting-only nodes only broadcast the state to the full master nodes, eventually all of them will have caught up + // and there should not be any remaining full master nodes with older state, effectively disabling election of + // voting-only nodes. 
+ if (joinVotes.getJoins().stream().anyMatch(fullMasterWithSameState(localAcceptedTerm, localAcceptedVersion)) && + localAcceptedTerm > 0 && + joinVotes.getJoins().stream().noneMatch(fullMasterWithOlderState(localAcceptedTerm, localAcceptedVersion))) { return false; } } return true; } + + private static Predicate fullMasterWithSameState(long localAcceptedTerm, long localAcceptedVersion) { + return join -> isFullMasterNode(join.getSourceNode()) && + join.getLastAcceptedTerm() == localAcceptedTerm && + join.getLastAcceptedVersion() == localAcceptedVersion; + } + + private static Predicate fullMasterWithOlderState(long localAcceptedTerm, long localAcceptedVersion) { + return join -> isFullMasterNode(join.getSourceNode()) && + (join.getLastAcceptedTerm() < localAcceptedTerm || + (join.getLastAcceptedTerm() == localAcceptedTerm && join.getLastAcceptedVersion() < localAcceptedVersion)); + } } static class VotingOnlyNodeAsyncSender implements TransportInterceptor.AsyncSender { diff --git a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java index 5048232bd4ecf..08afd3de70f2b 100644 --- a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java +++ b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodeCoordinatorTests.java @@ -11,6 +11,10 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportInterceptor; +import org.junit.BeforeClass; import java.util.Collections; import java.util.Set; @@ -19,7 +23,31 @@ public class VotingOnlyNodeCoordinatorTests extends AbstractCoordinatorTestCase { - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/43631") + @BeforeClass + public static void setPossibleRolesWithVotingOnly() { + DiscoveryNode.setPossibleRoles( + Sets.union(DiscoveryNodeRole.BUILT_IN_ROLES, Sets.newHashSet(VotingOnlyNodePlugin.VOTING_ONLY_NODE_ROLE))); + } + + @Override + protected TransportInterceptor getTransportInterceptor(DiscoveryNode localNode, ThreadPool threadPool) { + if (VotingOnlyNodePlugin.isVotingOnlyNode(localNode)) { + return new TransportInterceptor() { + @Override + public AsyncSender interceptSender(AsyncSender sender) { + return new VotingOnlyNodePlugin.VotingOnlyNodeAsyncSender(sender, () -> threadPool); + } + }; + } else { + return super.getTransportInterceptor(localNode, threadPool); + } + } + + @Override + protected ElectionStrategy getElectionStrategy() { + return new VotingOnlyNodePlugin.VotingOnlyNodeElectionStrategy(); + } + public void testDoesNotElectVotingOnlyMasterNode() { final Cluster cluster = new Cluster(randomIntBetween(1, 5), false, Settings.EMPTY); cluster.runRandomly(); @@ -27,7 +55,7 @@ public void testDoesNotElectVotingOnlyMasterNode() { final Cluster.ClusterNode leader = cluster.getAnyLeader(); assertTrue(leader.getLocalNode().isMasterNode()); - assertFalse(VotingOnlyNodePlugin.isVotingOnlyNode(leader.getLocalNode())); + assertFalse(leader.getLocalNode().toString(), VotingOnlyNodePlugin.isVotingOnlyNode(leader.getLocalNode())); } @Override diff --git 
a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePluginTests.java b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePluginTests.java index e6d9cbb3c2d2f..c55a405059a70 100644 --- a/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePluginTests.java +++ b/x-pack/plugin/voting-only-node/src/test/java/org/elasticsearch/cluster/coordination/VotingOnlyNodePluginTests.java @@ -68,6 +68,18 @@ public void testPreferFullMasterOverVotingOnlyNodes() throws Exception { equalTo(false)); } + public void testBootstrapOnlyVotingOnlyNodes() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + internalCluster().startNodes(Settings.builder().put(VotingOnlyNodePlugin.VOTING_ONLY_NODE_SETTING.getKey(), true).build(), + Settings.EMPTY, Settings.EMPTY); + assertBusy(() -> assertThat( + client().admin().cluster().prepareState().get().getState().getLastCommittedConfiguration().getNodeIds().size(), + equalTo(3))); + assertThat( + VotingOnlyNodePlugin.isVotingOnlyNode(client().admin().cluster().prepareState().get().getState().nodes().getMasterNode()), + equalTo(false)); + } + public void testVotingOnlyNodesCannotBeMasterWithoutFullMasterNodes() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); internalCluster().startNode(); From 56ee1a5e007e86153f2b43cf9739921041f996c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 27 Jun 2019 18:27:11 +0200 Subject: [PATCH 042/140] Allow reloading of search time analyzers (#43313) Currently changing resources (like dictionaries, synonym files etc...) of search time analyzers is only possible by closing an index, changing the underlying resource (e.g. synonym files) and then re-opening the index for the change to take effect. This PR adds a new API endpoint that allows triggering reloading of certain analysis resources (currently token filters) that will then pick up changes in underlying file resources. To achieve this we introduce a new type of custom analyzer (ReloadableCustomAnalyzer) that uses a ReuseStrategy that allows swapping out analysis components. Custom analyzers that contain filters that are marked as "updateable" will automatically choose this implementation. This PR also adds this capability to `synonym` token filters for use in search time analyzers. 
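As a usage sketch (the index name is hypothetical; the endpoint and its behaviour are the ones documented in this patch): after updating the synonyms file referenced by an updateable `synonym` filter on every data node, the reload is triggered with

    POST /my_index/_reload_search_analyzers

after which search-time analyzers pick up the new synonym definitions without closing and re-opening the index.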
Relates to #29051 --- .../client/RestHighLevelClientTests.java | 4 +- .../tokenfilters/synonym-tokenfilter.asciidoc | 2 + .../indices/apis/reload-analyzers.asciidoc | 72 ++++++++ docs/reference/rest-api/index.asciidoc | 2 + .../common/SynonymTokenFilterFactory.java | 13 ++ .../analyze/TransportAnalyzeAction.java | 46 ++--- .../client/IndicesAdminClient.java | 1 + .../org/elasticsearch/client/Requests.java | 1 - .../index/analysis/AnalysisRegistry.java | 9 +- .../index/analysis/AnalyzerComponents.java | 111 ++++++++++++ .../analysis/AnalyzerComponentsProvider.java | 29 +++ .../index/analysis/CustomAnalyzer.java | 51 +++--- .../analysis/CustomAnalyzerProvider.java | 72 +++----- .../index/analysis/IndexAnalyzers.java | 7 + .../index/analysis/NamedAnalyzer.java | 4 +- .../analysis/ReloadableCustomAnalyzer.java | 162 +++++++++++++++++ .../index/mapper/FieldTypeLookup.java | 1 - .../index/mapper/MapperService.java | 22 +++ .../highlight/FragmentBuilderHelper.java | 7 +- .../phrase/PhraseSuggestionBuilder.java | 7 +- .../clear/ClearIndicesCacheResponseTests.java | 2 +- .../indices/flush/FlushResponseTests.java | 2 +- .../forcemerge/ForceMergeResponseTests.java | 2 +- .../indices/refresh/RefreshResponseTests.java | 2 +- .../query/ValidateQueryResponseTests.java | 2 +- .../ReloadableCustomAnalyzerTests.java | 168 ++++++++++++++++++ .../index/mapper/MapperServiceTests.java | 109 +++++++++++- .../AbstractBroadcastResponseTestCase.java | 3 +- .../elasticsearch/xpack/core/XPackPlugin.java | 5 + .../core/action/ReloadAnalyzerAction.java | 23 +++ .../core/action/ReloadAnalyzersRequest.java | 43 +++++ .../core/action/ReloadAnalyzersResponse.java | 89 ++++++++++ .../TransportReloadAnalyzersAction.java | 155 ++++++++++++++++ .../action/RestReloadAnalyzersAction.java | 40 +++++ .../action/ReloadAnalyzersResponseTests.java | 51 ++++++ .../action/ReloadSynonymAnalyzerTests.java | 102 +++++++++++ .../rest/action/ReloadSynonymAnalyzerIT.java | 120 +++++++++++++ .../api/indices.reload_search_analyzers.json | 33 ++++ 38 files changed, 1454 insertions(+), 120 deletions(-) create mode 100644 docs/reference/indices/apis/reload-analyzers.asciidoc create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponents.java create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponentsProvider.java create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java create mode 100644 server/src/test/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzerTests.java rename {server/src/test/java/org/elasticsearch/action/support/broadcast => test/framework/src/main/java/org/elasticsearch/test}/AbstractBroadcastResponseTestCase.java (98%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportReloadAnalyzersAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponseTests.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadSynonymAnalyzerTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rest/action/ReloadSynonymAnalyzerIT.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/indices.reload_search_analyzers.json diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 26e5842019675..650bf2e440321 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -730,8 +730,8 @@ public void testApiNamingConventions() throws Exception { "indices.exists_type", "indices.get_upgrade", "indices.put_alias", - "scripts_painless_execute", - "render_search_template" + "render_search_template", + "scripts_painless_execute" }; //These API are not required for high-level client feature completeness String[] notRequiredApi = new String[] { diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 139f7c3ab0ad0..f47e97d27ea7f 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -43,6 +43,8 @@ Additional settings are: * `expand` (defaults to `true`). * `lenient` (defaults to `false`). If `true` ignores exceptions while parsing the synonym configuration. It is important to note that only those synonym rules which cannot get parsed are ignored. For instance consider the following request: + + [source,js] -------------------------------------------------- diff --git a/docs/reference/indices/apis/reload-analyzers.asciidoc b/docs/reference/indices/apis/reload-analyzers.asciidoc new file mode 100644 index 0000000000000..f3365968df37c --- /dev/null +++ b/docs/reference/indices/apis/reload-analyzers.asciidoc @@ -0,0 +1,72 @@ +[role="xpack"] +[testenv="basic"] +[[indices-reload-analyzers]] +== Reload Search Analyzers + +experimental[] + +Reloads search analyzers and their resources. + +Synonym filters (both `synonym` and `synonym_graph`) can be declared as +updateable if they are only used in <> +with the `updateable` flag: + +[source,js] +-------------------------------------------------- +PUT /my_index +{ + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "my_synonyms" : { + "tokenizer" : "whitespace", + "filter" : ["synonym"] + } + }, + "filter" : { + "synonym" : { + "type" : "synonym", + "synonyms_path" : "analysis/synonym.txt", + "updateable" : true <1> + } + } + } + } + }, + "mappings": { + "properties": { + "text": { + "type": "text", + "analyzer" : "standard", + "search_analyzer": "my_synonyms" <2> + } + } + } +} +-------------------------------------------------- +// CONSOLE + +<1> Mark the synonym filter as updateable. +<2> Synonym analyzer is usable as a search_analyzer. + +NOTE: Trying to use the above analyzer as an index analyzer will result in an error. + +Using the <>, you can trigger reloading of the +synonym definition. The contents of the configured synonyms file will be reloaded and the +synonyms definition the filter uses will be updated. + +The `_reload_search_analyzers` API can be run on one or more indices and will trigger +reloading of the synonyms from the configured file. 
+
+NOTE: Reloading happens on every node that holds shards of the index, so it's important
+to update the synonym file contents on every data node (even the ones that don't currently
+hold shard copies; shards might be relocated there in the future) before calling
+reload to ensure the new state of the file is reflected everywhere in the cluster.
+
+[source,js]
+--------------------------------------------------
+POST /my_index/_reload_search_analyzers
+--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT my_index\n/]
\ No newline at end of file
diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc
index dc73ac134d566..9f70c2a3cef65 100644
--- a/docs/reference/rest-api/index.asciidoc
+++ b/docs/reference/rest-api/index.asciidoc
@@ -15,6 +15,7 @@ not be included yet.
* <>
* <>
* <>, <>
+* <>
* <>
* <>
* <>
@@ -38,4 +39,5 @@ include::{es-repo-dir}/rollup/rollup-api.asciidoc[]
include::{xes-repo-dir}/rest-api/security.asciidoc[]
include::{es-repo-dir}/indices/apis/unfreeze.asciidoc[]
include::{xes-repo-dir}/rest-api/watcher.asciidoc[]
+include::{es-repo-dir}/indices/apis/reload-analyzers.asciidoc[]
include::defs.asciidoc[]
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java
index 75d4eca4254f8..5d6135549b882 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java
@@ -30,6 +30,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.Analysis;
+import org.elasticsearch.index.analysis.AnalysisMode;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.TokenFilterFactory;
@@ -50,6 +51,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
private final boolean lenient;
protected final Settings settings;
protected final Environment environment;
+ private final boolean updateable;
SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env,
String name, Settings settings) {
@@ -65,9 +67,15 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
this.expand = settings.getAsBoolean("expand", true);
this.lenient = settings.getAsBoolean("lenient", false);
this.format = settings.get("format", "");
+ this.updateable = settings.getAsBoolean("updateable", false);
this.environment = env;
}
+ @Override
+ public AnalysisMode getAnalysisMode() {
+ return this.updateable ? AnalysisMode.SEARCH_TIME : AnalysisMode.ALL;
+ }
+
@Override
public TokenStream create(TokenStream tokenStream) {
throw new IllegalStateException("Call createPerAnalyzerSynonymFactory to specialize this factory for an analysis chain first");
@@ -98,6 +106,11 @@ public TokenFilterFactory getSynonymFilter() {
// which doesn't support stacked input tokens
return IDENTITY_FILTER;
}
+
+ @Override
+ public AnalysisMode getAnalysisMode() {
+ return updateable ?
AnalysisMode.SEARCH_TIME : AnalysisMode.ALL; + } }; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 9a48592b5b8bd..773852860c60e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -42,8 +42,9 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalyzerComponents; +import org.elasticsearch.index.analysis.AnalyzerComponentsProvider; import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.NameOrDefinition; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -261,18 +262,23 @@ private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R } } - CustomAnalyzer customAnalyzer = null; - if (analyzer instanceof CustomAnalyzer) { - customAnalyzer = (CustomAnalyzer) analyzer; - } else if (analyzer instanceof NamedAnalyzer && ((NamedAnalyzer) analyzer).analyzer() instanceof CustomAnalyzer) { - customAnalyzer = (CustomAnalyzer) ((NamedAnalyzer) analyzer).analyzer(); + // maybe unwrap analyzer from NamedAnalyzer + Analyzer potentialCustomAnalyzer = analyzer; + if (analyzer instanceof NamedAnalyzer) { + potentialCustomAnalyzer = ((NamedAnalyzer) analyzer).analyzer(); } - if (customAnalyzer != null) { - // customAnalyzer = divide charfilter, tokenizer tokenfilters - CharFilterFactory[] charFilterFactories = customAnalyzer.charFilters(); - TokenizerFactory tokenizerFactory = customAnalyzer.tokenizerFactory(); - TokenFilterFactory[] tokenFilterFactories = customAnalyzer.tokenFilters(); + if (potentialCustomAnalyzer instanceof AnalyzerComponentsProvider) { + AnalyzerComponentsProvider customAnalyzer = (AnalyzerComponentsProvider) potentialCustomAnalyzer; + // note: this is not field-name dependent in our cases so we can leave out the argument + int positionIncrementGap = potentialCustomAnalyzer.getPositionIncrementGap(""); + int offsetGap = potentialCustomAnalyzer.getOffsetGap(""); + AnalyzerComponents components = customAnalyzer.getComponents(); + // divide charfilter, tokenizer tokenfilters + CharFilterFactory[] charFilterFactories = components.getCharFilters(); + TokenizerFactory tokenizerFactory = components.getTokenizerFactory(); + TokenFilterFactory[] tokenFilterFactories = components.getTokenFilters(); + String tokenizerName = components.getTokenizerName(); String[][] charFiltersTexts = new String[charFilterFactories != null ? charFilterFactories.length : 0][request.text().length]; TokenListCreator[] tokenFiltersTokenListCreator = new TokenListCreator[tokenFilterFactories != null ? 
@@ -298,7 +304,7 @@ private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R // analyzing only tokenizer Tokenizer tokenizer = tokenizerFactory.create(); tokenizer.setReader(reader); - tokenizerTokenListCreator.analyze(tokenizer, customAnalyzer, includeAttributes); + tokenizerTokenListCreator.analyze(tokenizer, includeAttributes, positionIncrementGap, offsetGap); // analyzing each tokenfilter if (tokenFilterFactories != null) { @@ -308,7 +314,7 @@ private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R } TokenStream stream = createStackedTokenStream(request.text()[textIndex], charFilterFactories, tokenizerFactory, tokenFilterFactories, tokenFilterIndex + 1); - tokenFiltersTokenListCreator[tokenFilterIndex].analyze(stream, customAnalyzer, includeAttributes); + tokenFiltersTokenListCreator[tokenFilterIndex].analyze(stream, includeAttributes, positionIncrementGap, offsetGap); } } } @@ -331,8 +337,8 @@ private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R tokenFilterFactories[tokenFilterIndex].name(), tokenFiltersTokenListCreator[tokenFilterIndex].getArrayTokens()); } } - detailResponse = new AnalyzeAction.DetailAnalyzeResponse(charFilteredLists, new AnalyzeAction.AnalyzeTokenList( - customAnalyzer.getTokenizerName(), tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists); + detailResponse = new AnalyzeAction.DetailAnalyzeResponse(charFilteredLists, + new AnalyzeAction.AnalyzeTokenList(tokenizerName, tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists); } else { String name; if (analyzer instanceof NamedAnalyzer) { @@ -343,8 +349,8 @@ private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R TokenListCreator tokenListCreator = new TokenListCreator(maxTokenCount); for (String text : request.text()) { - tokenListCreator.analyze(analyzer.tokenStream("", text), analyzer, - includeAttributes); + tokenListCreator.analyze(analyzer.tokenStream("", text), includeAttributes, analyzer.getPositionIncrementGap(""), + analyzer.getOffsetGap("")); } detailResponse = new AnalyzeAction.DetailAnalyzeResponse(new AnalyzeAction.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); @@ -414,7 +420,7 @@ private static class TokenListCreator { tc = new TokenCounter(maxTokenCount); } - private void analyze(TokenStream stream, Analyzer analyzer, Set includeAttributes) { + private void analyze(TokenStream stream, Set includeAttributes, int positionIncrementGap, int offsetGap) { try { stream.reset(); CharTermAttribute term = stream.addAttribute(CharTermAttribute.class); @@ -437,8 +443,8 @@ private void analyze(TokenStream stream, Analyzer analyzer, Set includeA lastOffset += offset.endOffset(); lastPosition += posIncr.getPositionIncrement(); - lastPosition += analyzer.getPositionIncrementGap(""); - lastOffset += analyzer.getOffsetGap(""); + lastPosition += positionIncrementGap; + lastOffset += offsetGap; } catch (IOException e) { throw new ElasticsearchException("failed to analyze", e); diff --git a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 2afbae944a77e..40c4c1046577b 100644 --- a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -738,4 +738,5 @@ public interface IndicesAdminClient extends ElasticsearchClient { * Swaps the index pointed to by an alias given all provided conditions are satisfied */ 
void rolloverIndex(RolloverRequest request, ActionListener listener); + } diff --git a/server/src/main/java/org/elasticsearch/client/Requests.java b/server/src/main/java/org/elasticsearch/client/Requests.java index 57480050d2e2f..a3eb23eebfe20 100644 --- a/server/src/main/java/org/elasticsearch/client/Requests.java +++ b/server/src/main/java/org/elasticsearch/client/Requests.java @@ -523,5 +523,4 @@ public static DeleteSnapshotRequest deleteSnapshotRequest(String repository, Str public static SnapshotsStatusRequest snapshotsStatusRequest(String repository) { return new SnapshotsStatusRequest(repository); } - } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 496b1eb3bfaea..51b72680c9316 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -525,7 +525,6 @@ public IndexAnalyzers build(IndexSettings indexSettings, Map tokenizerFactoryFactories, Map charFilterFactoryFactories, Map tokenFilterFactoryFactories) { - Map analyzers = new HashMap<>(); Map normalizers = new HashMap<>(); Map whitespaceNormalizers = new HashMap<>(); @@ -567,9 +566,11 @@ public IndexAnalyzers build(IndexSettings indexSettings, return new IndexAnalyzers(analyzers, normalizers, whitespaceNormalizers); } - private static NamedAnalyzer produceAnalyzer(String name, AnalyzerProvider analyzerFactory, - Map tokenFilters, Map charFilters, - Map tokenizers) { + private static NamedAnalyzer produceAnalyzer(String name, + AnalyzerProvider analyzerFactory, + Map tokenFilters, + Map charFilters, + Map tokenizers) { /* * Lucene defaults positionIncrementGap to 0 in all analyzers but * Elasticsearch defaults them to 0 only before version 2.0 diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponents.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponents.java new file mode 100644 index 0000000000000..f150ac54558e0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponents.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.elasticsearch.common.settings.Settings; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * A class that groups analysis components necessary to produce a custom analyzer. + * See {@link ReloadableCustomAnalyzer} for an example usage. 
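+ * <p>
+ * Descriptive note: as the constructor below shows, the {@link AnalysisMode} exposed by these
+ * components is the merge of the modes of all contained token filters, so a single
+ * search-time-only filter (such as an updateable synonym filter) restricts the whole
+ * component set to search-time use.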
+ */ +public final class AnalyzerComponents { + private final String tokenizerName; + private final TokenizerFactory tokenizerFactory; + private final CharFilterFactory[] charFilters; + private final TokenFilterFactory[] tokenFilters; + private final AnalysisMode analysisMode; + + AnalyzerComponents(String tokenizerName, TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, + TokenFilterFactory[] tokenFilters) { + this.tokenizerName = tokenizerName; + this.tokenizerFactory = tokenizerFactory; + this.charFilters = charFilters; + this.tokenFilters = tokenFilters; + AnalysisMode mode = AnalysisMode.ALL; + for (TokenFilterFactory f : tokenFilters) { + mode = mode.merge(f.getAnalysisMode()); + } + this.analysisMode = mode; + } + + static AnalyzerComponents createComponents(String name, Settings analyzerSettings, final Map tokenizers, + final Map charFilters, final Map tokenFilters) { + String tokenizerName = analyzerSettings.get("tokenizer"); + if (tokenizerName == null) { + throw new IllegalArgumentException("Custom Analyzer [" + name + "] must be configured with a tokenizer"); + } + + TokenizerFactory tokenizer = tokenizers.get(tokenizerName); + if (tokenizer == null) { + throw new IllegalArgumentException( + "Custom Analyzer [" + name + "] failed to find tokenizer under name " + "[" + tokenizerName + "]"); + } + + List charFilterNames = analyzerSettings.getAsList("char_filter"); + List charFiltersList = new ArrayList<>(charFilterNames.size()); + for (String charFilterName : charFilterNames) { + CharFilterFactory charFilter = charFilters.get(charFilterName); + if (charFilter == null) { + throw new IllegalArgumentException( + "Custom Analyzer [" + name + "] failed to find char_filter under name " + "[" + charFilterName + "]"); + } + charFiltersList.add(charFilter); + } + + List tokenFilterNames = analyzerSettings.getAsList("filter"); + List tokenFilterList = new ArrayList<>(tokenFilterNames.size()); + for (String tokenFilterName : tokenFilterNames) { + TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName); + if (tokenFilter == null) { + throw new IllegalArgumentException( + "Custom Analyzer [" + name + "] failed to find filter under name " + "[" + tokenFilterName + "]"); + } + tokenFilter = tokenFilter.getChainAwareTokenFilterFactory(tokenizer, charFiltersList, tokenFilterList, tokenFilters::get); + tokenFilterList.add(tokenFilter); + } + + return new AnalyzerComponents(tokenizerName, tokenizer, charFiltersList.toArray(new CharFilterFactory[charFiltersList.size()]), + tokenFilterList.toArray(new TokenFilterFactory[tokenFilterList.size()])); + } + + public String getTokenizerName() { + return tokenizerName; + } + + public TokenizerFactory getTokenizerFactory() { + return tokenizerFactory; + } + + public TokenFilterFactory[] getTokenFilters() { + return tokenFilters; + } + + public CharFilterFactory[] getCharFilters() { + return charFilters; + } + + public AnalysisMode analysisMode() { + return this.analysisMode; + } +} \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponentsProvider.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponentsProvider.java new file mode 100644 index 0000000000000..84a3a14038f92 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalyzerComponentsProvider.java @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +/** + * Analyzers that provide access to their token filters should implement this + */ +public interface AnalyzerComponentsProvider { + + AnalyzerComponents getComponents(); + +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java index a41ee33564400..685dd2a7de036 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java @@ -25,15 +25,9 @@ import java.io.Reader; -public final class CustomAnalyzer extends Analyzer { - - private final String tokenizerName; - private final TokenizerFactory tokenizerFactory; - - private final CharFilterFactory[] charFilters; - - private final TokenFilterFactory[] tokenFilters; +public final class CustomAnalyzer extends Analyzer implements AnalyzerComponentsProvider { + private final AnalyzerComponents components; private final int positionIncrementGap; private final int offsetGap; private final AnalysisMode analysisMode; @@ -45,10 +39,7 @@ public CustomAnalyzer(String tokenizerName, TokenizerFactory tokenizerFactory, C public CustomAnalyzer(String tokenizerName, TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, TokenFilterFactory[] tokenFilters, int positionIncrementGap, int offsetGap) { - this.tokenizerName = tokenizerName; - this.tokenizerFactory = tokenizerFactory; - this.charFilters = charFilters; - this.tokenFilters = tokenFilters; + this.components = new AnalyzerComponents(tokenizerName, tokenizerFactory, charFilters, tokenFilters); this.positionIncrementGap = positionIncrementGap; this.offsetGap = offsetGap; // merge and transfer token filter analysis modes with analyzer @@ -63,19 +54,19 @@ public CustomAnalyzer(String tokenizerName, TokenizerFactory tokenizerFactory, C * The name of the tokenizer as configured by the user. 
*/ public String getTokenizerName() { - return tokenizerName; + return this.components.getTokenizerName(); } public TokenizerFactory tokenizerFactory() { - return tokenizerFactory; + return this.components.getTokenizerFactory(); } public TokenFilterFactory[] tokenFilters() { - return tokenFilters; + return this.components.getTokenFilters(); } public CharFilterFactory[] charFilters() { - return charFilters; + return this.components.getCharFilters(); } @Override @@ -95,11 +86,16 @@ public AnalysisMode getAnalysisMode() { return this.analysisMode; } + @Override + public AnalyzerComponents getComponents() { + return this.components; + } + @Override protected TokenStreamComponents createComponents(String fieldName) { - Tokenizer tokenizer = tokenizerFactory.create(); + Tokenizer tokenizer = this.tokenizerFactory().create(); TokenStream tokenStream = tokenizer; - for (TokenFilterFactory tokenFilter : tokenFilters) { + for (TokenFilterFactory tokenFilter : tokenFilters()) { tokenStream = tokenFilter.create(tokenStream); } return new TokenStreamComponents(tokenizer, tokenStream); @@ -107,6 +103,7 @@ protected TokenStreamComponents createComponents(String fieldName) { @Override protected Reader initReader(String fieldName, Reader reader) { + CharFilterFactory[] charFilters = charFilters(); if (charFilters != null && charFilters.length > 0) { for (CharFilterFactory charFilter : charFilters) { reader = charFilter.create(reader); @@ -117,18 +114,18 @@ protected Reader initReader(String fieldName, Reader reader) { @Override protected Reader initReaderForNormalization(String fieldName, Reader reader) { - for (CharFilterFactory charFilter : charFilters) { - reader = charFilter.normalize(reader); - } - return reader; + for (CharFilterFactory charFilter : charFilters()) { + reader = charFilter.normalize(reader); + } + return reader; } @Override protected TokenStream normalize(String fieldName, TokenStream in) { - TokenStream result = in; - for (TokenFilterFactory filter : tokenFilters) { - result = filter.normalize(result); - } - return result; + TokenStream result = in; + for (TokenFilterFactory filter : tokenFilters()) { + result = filter.normalize(result); + } + return result; } } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index 8080a6af876a4..d8a50838e9df4 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -19,23 +19,24 @@ package org.elasticsearch.index.analysis; +import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.TextFieldMapper; -import java.util.ArrayList; -import java.util.List; import java.util.Map; +import static org.elasticsearch.index.analysis.AnalyzerComponents.createComponents; + /** * A custom analyzer that is built out of a single {@link org.apache.lucene.analysis.Tokenizer} and a list * of {@link org.apache.lucene.analysis.TokenFilter}s. 
*/
-public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider {
+public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider {
private final Settings analyzerSettings;
- private CustomAnalyzer customAnalyzer;
+ private Analyzer customAnalyzer;
public CustomAnalyzerProvider(IndexSettings indexSettings,
String name, Settings settings) {
@@ -43,58 +44,33 @@ public CustomAnalyzerProvider(IndexSettings indexSettings,
this.analyzerSettings = settings;
}
- void build(final Map tokenizers, final Map charFilters,
- final Map tokenFilters) {
- String tokenizerName = analyzerSettings.get("tokenizer");
- if (tokenizerName == null) {
- throw new IllegalArgumentException("Custom Analyzer [" + name() + "] must be configured with a tokenizer");
- }
-
- TokenizerFactory tokenizer = tokenizers.get(tokenizerName);
- if (tokenizer == null) {
- throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name " +
- "[" + tokenizerName + "]");
- }
-
- List charFilterNames = analyzerSettings.getAsList("char_filter");
- List charFiltersList = new ArrayList<>(charFilterNames.size());
- for (String charFilterName : charFilterNames) {
- CharFilterFactory charFilter = charFilters.get(charFilterName);
- if (charFilter == null) {
- throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find char_filter under name " +
- "[" + charFilterName + "]");
- }
- charFiltersList.add(charFilter);
- }
+ void build(final Map tokenizers,
+ final Map charFilters,
+ final Map tokenFilters) {
+ customAnalyzer = create(name(), analyzerSettings, tokenizers, charFilters, tokenFilters);
+ }
+ /**
+ * Factory method that either returns a plain {@link CustomAnalyzer} if the components used for creation support index
+ * and search time use, or a {@link ReloadableCustomAnalyzer} if the components are intended for search time use only.
+ */
+ private static Analyzer create(String name, Settings analyzerSettings, Map tokenizers,
+ Map charFilters,
+ Map tokenFilters) {
int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
- positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap);
- int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);
-
- List tokenFilterNames = analyzerSettings.getAsList("filter");
- List tokenFilterList = new ArrayList<>(tokenFilterNames.size());
- for (String tokenFilterName : tokenFilterNames) {
- TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
- if (tokenFilter == null) {
- throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find filter under name " +
- "[" + tokenFilterName + "]");
- }
- tokenFilter = tokenFilter.getChainAwareTokenFilterFactory(tokenizer, charFiltersList, tokenFilterList, tokenFilters::get);
- tokenFilterList.add(tokenFilter);
+ AnalyzerComponents components = createComponents(name, analyzerSettings, tokenizers, charFilters, tokenFilters);
+ if (components.analysisMode().equals(AnalysisMode.SEARCH_TIME)) {
+ return new ReloadableCustomAnalyzer(components, positionIncrementGap, offsetGap);
+ } else {
+ return new CustomAnalyzer(components.getTokenizerName(), components.getTokenizerFactory(), components.getCharFilters(),
+ components.getTokenFilters(), positionIncrementGap, offsetGap);
}
-
- this.customAnalyzer = new CustomAnalyzer(tokenizerName, tokenizer,
- charFiltersList.toArray(new CharFilterFactory[charFiltersList.size()]),
- tokenFilterList.toArray(new TokenFilterFactory[tokenFilterList.size()]),
- positionIncrementGap,
- offsetGap
- );
}
@Override
- public CustomAnalyzer get() {
+ public Analyzer get() {
return this.customAnalyzer;
}
}
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java
index 900a6560a666c..be77df42cb9c3 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java
@@ -62,6 +62,13 @@ public NamedAnalyzer get(String name) {
return analyzers.get(name);
}
+ /**
+ * Returns an (unmodifiable) map containing the index analyzers
+ */
+ public Map getAnalyzers() {
+ return analyzers;
+ }
+
/**
* Returns a normalizer mapped to the given name or null if not present
*/
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java
index 4831d88f3aa1f..0c53cc323d678 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java
@@ -112,8 +112,8 @@ public void checkAllowedInMode(AnalysisMode mode) {
return; // everything allowed if this analyzer is in ALL mode
}
if (this.getAnalysisMode() != mode) {
- if (analyzer instanceof CustomAnalyzer) {
- TokenFilterFactory[] tokenFilters = ((CustomAnalyzer) analyzer).tokenFilters();
+ if (analyzer instanceof AnalyzerComponentsProvider) {
+ TokenFilterFactory[] tokenFilters = ((AnalyzerComponentsProvider) analyzer).getComponents().getTokenFilters();
List offendingFilters = new ArrayList<>();
for (TokenFilterFactory tokenFilter : tokenFilters) {
if (tokenFilter.getAnalysisMode() != mode) {
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java
b/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java new file mode 100644 index 0000000000000..7d3b8532caeb0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.util.CloseableThreadLocal; +import org.elasticsearch.common.settings.Settings; + +import java.io.Reader; +import java.util.Map; + +public final class ReloadableCustomAnalyzer extends Analyzer implements AnalyzerComponentsProvider { + + private volatile AnalyzerComponents components; + + private CloseableThreadLocal storedComponents = new CloseableThreadLocal<>(); + + private final int positionIncrementGap; + + private final int offsetGap; + + /** + * An alternative {@link ReuseStrategy} that allows swapping the stored analyzer components when they change. + * This is used to change e.g. token filters in search time analyzers. 
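+ * <p>
+ * Descriptive note on the flow below: when the components stored for the current thread in the
+ * {@link CloseableThreadLocal} differ from the current (volatile) components, the strategy
+ * returns {@code null}, which makes the {@link Analyzer} base class build fresh
+ * {@code TokenStreamComponents} from the freshly stored components instead of reusing a cached stream.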
+ */ + private static final ReuseStrategy UPDATE_STRATEGY = new ReuseStrategy() { + @Override + public TokenStreamComponents getReusableComponents(Analyzer analyzer, String fieldName) { + ReloadableCustomAnalyzer custom = (ReloadableCustomAnalyzer) analyzer; + AnalyzerComponents components = custom.getComponents(); + AnalyzerComponents storedComponents = custom.getStoredComponents(); + if (storedComponents == null || components != storedComponents) { + custom.setStoredComponents(components); + return null; + } + TokenStreamComponents tokenStream = (TokenStreamComponents) getStoredValue(analyzer); + assert tokenStream != null; + return tokenStream; + } + + @Override + public void setReusableComponents(Analyzer analyzer, String fieldName, TokenStreamComponents tokenStream) { + setStoredValue(analyzer, tokenStream); + } + }; + + ReloadableCustomAnalyzer(AnalyzerComponents components, int positionIncrementGap, int offsetGap) { + super(UPDATE_STRATEGY); + if (components.analysisMode().equals(AnalysisMode.SEARCH_TIME) == false) { + throw new IllegalArgumentException( + "ReloadableCustomAnalyzer must only be initialized with analysis components in AnalysisMode.SEARCH_TIME mode"); + } + this.components = components; + this.positionIncrementGap = positionIncrementGap; + this.offsetGap = offsetGap; + } + + @Override + public AnalyzerComponents getComponents() { + return this.components; + } + + @Override + public int getPositionIncrementGap(String fieldName) { + return this.positionIncrementGap; + } + + @Override + public int getOffsetGap(String field) { + if (this.offsetGap < 0) { + return super.getOffsetGap(field); + } + return this.offsetGap; + } + + public AnalysisMode getAnalysisMode() { + return this.components.analysisMode(); + } + + @Override + protected Reader initReaderForNormalization(String fieldName, Reader reader) { + final AnalyzerComponents components = getComponents(); + for (CharFilterFactory charFilter : components.getCharFilters()) { + reader = charFilter.normalize(reader); + } + return reader; + } + + @Override + protected TokenStream normalize(String fieldName, TokenStream in) { + final AnalyzerComponents components = getComponents(); + TokenStream result = in; + for (TokenFilterFactory filter : components.getTokenFilters()) { + result = filter.normalize(result); + } + return result; + } + + public synchronized void reload(String name, + Settings settings, + final Map tokenizers, + final Map charFilters, + final Map tokenFilters) { + AnalyzerComponents components = AnalyzerComponents.createComponents(name, settings, tokenizers, charFilters, tokenFilters); + this.components = components; + } + + @Override + public void close() { + super.close(); + storedComponents.close(); + } + + private void setStoredComponents(AnalyzerComponents components) { + storedComponents.set(components); + } + + private AnalyzerComponents getStoredComponents() { + return storedComponents.get(); + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + final AnalyzerComponents components = getStoredComponents(); + Tokenizer tokenizer = components.getTokenizerFactory().create(); + TokenStream tokenStream = tokenizer; + for (TokenFilterFactory tokenFilter : components.getTokenFilters()) { + tokenStream = tokenFilter.create(tokenStream); + } + return new TokenStreamComponents(tokenizer, tokenStream); + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + final AnalyzerComponents components = getStoredComponents(); + if 
(components.getCharFilters() != null && components.getCharFilters().length > 0) { + for (CharFilterFactory charFilter : components.getCharFilters()) { + reader = charFilter.create(reader); + } + } + return reader; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 27d061d8c2788..0dc8b6a00c09e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -86,7 +86,6 @@ public FieldTypeLookup copyAndAddAll(String type, return new FieldTypeLookup(fullName, aliases); } - /** Returns the field for the given field */ public MappedFieldType get(String field) { String concreteField = aliasToConcreteName.getOrDefault(field, field); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index fc33dcafc63b6..0c6120939ca39 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; @@ -46,8 +47,13 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSortConfig; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; @@ -841,4 +847,20 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { return defaultAnalyzer; } } + + public synchronized void reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException { + logger.info("reloading search analyzers"); + // refresh indexAnalyzers and search analyzers + final Map tokenizerFactories = registry.buildTokenizerFactories(indexSettings); + final Map charFilterFactories = registry.buildCharFilterFactories(indexSettings); + final Map tokenFilterFactories = registry.buildTokenFilterFactories(indexSettings); + final Map settings = indexSettings.getSettings().getGroups("index.analysis.analyzer"); + for (NamedAnalyzer namedAnalyzer : indexAnalyzers.getAnalyzers().values()) { + if (namedAnalyzer.analyzer() instanceof ReloadableCustomAnalyzer) { + ReloadableCustomAnalyzer analyzer = (ReloadableCustomAnalyzer) namedAnalyzer.analyzer(); + Settings analyzerSettings = settings.get(namedAnalyzer.name()); + analyzer.reload(namedAnalyzer.name(), analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories); + } + } + } } diff --git 
a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 583516c5cd4c2..d896d3a9d922d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.SubInfo; import org.apache.lucene.search.vectorhighlight.FragmentsBuilder; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.AnalyzerComponentsProvider; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.mapper.MappedFieldType; @@ -81,9 +81,8 @@ private static boolean containsBrokenAnalysis(Analyzer analyzer) { if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer) analyzer).analyzer(); } - if (analyzer instanceof CustomAnalyzer) { - final CustomAnalyzer a = (CustomAnalyzer) analyzer; - TokenFilterFactory[] tokenFilters = a.tokenFilters(); + if (analyzer instanceof AnalyzerComponentsProvider) { + final TokenFilterFactory[] tokenFilters = ((AnalyzerComponentsProvider) analyzer).getComponents().getTokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { if (tokenFilterFactory.breaksFastVectorHighlighter()) { return true; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 74b9437d67821..5b66badc733e0 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.AnalyzerComponentsProvider; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -675,9 +675,8 @@ private static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyze if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer)analyzer).analyzer(); } - if (analyzer instanceof CustomAnalyzer) { - final CustomAnalyzer a = (CustomAnalyzer) analyzer; - final TokenFilterFactory[] tokenFilters = a.tokenFilters(); + if (analyzer instanceof AnalyzerComponentsProvider) { + final TokenFilterFactory[] tokenFilters = ((AnalyzerComponentsProvider) analyzer).getComponents().getTokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { if (tokenFilterFactory instanceof ShingleTokenFilterFactory) { return ((ShingleTokenFilterFactory)tokenFilterFactory).getInnerFactory(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java index 4b58d5fb70246..76e96b9b05574 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java index 7b7cb8578e8b3..029426968f93c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java index f5e86fdcdfe9b..1ce73edf4a914 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java index b3d97f2edd448..dbe96aa387c99 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java index d72aae8fa2bd1..e5ba103e2d207 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzerTests.java new file mode 100644 index 0000000000000..e60df7e2ce1a4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzerTests.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.index.analysis.AnalyzerComponents.createComponents; + +public class ReloadableCustomAnalyzerTests extends ESTestCase { + + private static TestAnalysis testAnalysis; + private static Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + + private static TokenFilterFactory NO_OP_SEARCH_TIME_FILTER = new AbstractTokenFilterFactory( + IndexSettingsModule.newIndexSettings("index", settings), "my_filter", Settings.EMPTY) { + @Override + public AnalysisMode getAnalysisMode() { + return AnalysisMode.SEARCH_TIME; + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return tokenStream; + } + }; + + private static TokenFilterFactory LOWERCASE_SEARCH_TIME_FILTER = new AbstractTokenFilterFactory( + IndexSettingsModule.newIndexSettings("index", settings), "my_other_filter", Settings.EMPTY) { + @Override + public AnalysisMode getAnalysisMode() { + return AnalysisMode.SEARCH_TIME; + } + + @Override + 
public TokenStream create(TokenStream tokenStream) { + return new LowerCaseFilter(tokenStream); + } + }; + + @BeforeClass + public static void setup() throws IOException { + testAnalysis = createTestAnalysis(new Index("test", "_na_"), settings); + } + + /** + * test constructor and getters + */ + public void testBasicCtor() { + int positionIncrementGap = randomInt(); + int offsetGap = randomInt(); + + Settings analyzerSettings = Settings.builder() + .put("tokenizer", "standard") + .putList("filter", "my_filter") + .build(); + + AnalyzerComponents components = createComponents("my_analyzer", analyzerSettings, testAnalysis.tokenizer, testAnalysis.charFilter, + Collections.singletonMap("my_filter", NO_OP_SEARCH_TIME_FILTER)); + + try (ReloadableCustomAnalyzer analyzer = new ReloadableCustomAnalyzer(components, positionIncrementGap, offsetGap)) { + assertEquals(positionIncrementGap, analyzer.getPositionIncrementGap(randomAlphaOfLength(5))); + assertEquals(offsetGap >= 0 ? offsetGap : 1, analyzer.getOffsetGap(randomAlphaOfLength(5))); + assertEquals("standard", analyzer.getComponents().getTokenizerName()); + assertEquals(0, analyzer.getComponents().getCharFilters().length); + assertSame(testAnalysis.tokenizer.get("standard"), analyzer.getComponents().getTokenizerFactory()); + assertEquals(1, analyzer.getComponents().getTokenFilters().length); + assertSame(NO_OP_SEARCH_TIME_FILTER, analyzer.getComponents().getTokenFilters()[0]); + } + + // check that when using regular non-search time filters only, we get an exception + final Settings indexAnalyzerSettings = Settings.builder() + .put("tokenizer", "standard") + .putList("filter", "lowercase") + .build(); + AnalyzerComponents indexAnalyzerComponents = createComponents("my_analyzer", indexAnalyzerSettings, testAnalysis.tokenizer, + testAnalysis.charFilter, testAnalysis.tokenFilter); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> new ReloadableCustomAnalyzer(indexAnalyzerComponents, positionIncrementGap, offsetGap)); + assertEquals("ReloadableCustomAnalyzer must only be initialized with analysis components in AnalysisMode.SEARCH_TIME mode", + ex.getMessage()); + } + + /** + * start multiple threads that create token streams from this analyzer until reloaded tokenfilter takes effect + */ + public void testReloading() throws IOException, InterruptedException { + Settings analyzerSettings = Settings.builder() + .put("tokenizer", "standard") + .putList("filter", "my_filter") + .build(); + + AnalyzerComponents components = createComponents("my_analyzer", analyzerSettings, testAnalysis.tokenizer, testAnalysis.charFilter, + Collections.singletonMap("my_filter", NO_OP_SEARCH_TIME_FILTER)); + int numThreads = randomIntBetween(5, 10); + + ExecutorService executorService = Executors.newFixedThreadPool(numThreads); + CountDownLatch firstCheckpoint = new CountDownLatch(numThreads); + CountDownLatch secondCheckpoint = new CountDownLatch(numThreads); + + try (ReloadableCustomAnalyzer analyzer = new ReloadableCustomAnalyzer(components, 0, 0)) { + executorService.submit(() -> { + while (secondCheckpoint.getCount() > 0) { + try (TokenStream firstTokenStream = analyzer.tokenStream("myField", "TEXT")) { + firstTokenStream.reset(); + CharTermAttribute term = firstTokenStream.addAttribute(CharTermAttribute.class); + assertTrue(firstTokenStream.incrementToken()); + if (term.toString().equals("TEXT")) { + firstCheckpoint.countDown(); + } + if (term.toString().equals("text")) { + secondCheckpoint.countDown(); + } + 
assertFalse(firstTokenStream.incrementToken()); + firstTokenStream.end(); + } catch (Exception e) { + throw ExceptionsHelper.convertToRuntime(e); + } + } + }); + + // wait until all running threads have seen the unaltered upper case analysis at least once + assertTrue(firstCheckpoint.await(5, TimeUnit.SECONDS)); + + analyzer.reload("my_analyzer", analyzerSettings, testAnalysis.tokenizer, testAnalysis.charFilter, + Collections.singletonMap("my_filter", LOWERCASE_SEARCH_TIME_FILTER)); + + // wait until all running threads have seen the new lower case analysis at least once + assertTrue(secondCheckpoint.await(5, TimeUnit.SECONDS)); + + executorService.shutdown(); + executorService.awaitTermination(1, TimeUnit.SECONDS); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index d8c120e492d31..6bdfc167dec8b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -27,11 +29,21 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisMode; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.indices.InvalidTypeNameException; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -39,6 +51,8 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; @@ -49,7 +63,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return Collections.singleton(InternalSettingsPlugin.class); + return List.of(InternalSettingsPlugin.class, ReloadableFilterPlugin.class); } public void testTypeNameStartsWithIllegalDot() { @@ -434,4 +448,97 @@ public void testMappingRecoverySkipFieldNameLengthLimit() throws Throwable { assertEquals(testString, documentMapper.mappers().getMapper(testString).simpleName()); } + public void testReloadSearchAnalyzers() throws IOException { + Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put("index.analysis.analyzer.reloadableAnalyzer.type", "custom") + .put("index.analysis.analyzer.reloadableAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.reloadableAnalyzer.filter", "myReloadableFilter").build(); + + MapperService mapperService = createIndex("test_index", settings).mapperService(); + CompressedXContent mapping = new CompressedXContent(BytesReference.bytes( + XContentFactory.jsonBuilder().startObject().startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "text") + .field("analyzer", "simple") + .field("search_analyzer", "reloadableAnalyzer") + .field("search_quote_analyzer", "stop") + .endObject() + .startObject("otherField") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "simple") + .field("search_quote_analyzer", "reloadableAnalyzer") + .endObject() + .endObject() + .endObject().endObject())); + + mapperService.merge("_doc", mapping, MergeReason.MAPPING_UPDATE); + IndexAnalyzers current = mapperService.getIndexAnalyzers(); + + ReloadableCustomAnalyzer originalReloadableAnalyzer = (ReloadableCustomAnalyzer) current.get("reloadableAnalyzer").analyzer(); + TokenFilterFactory[] originalTokenFilters = originalReloadableAnalyzer.getComponents().getTokenFilters(); + assertEquals(1, originalTokenFilters.length); + assertEquals("myReloadableFilter", originalTokenFilters[0].name()); + + // now reload, this should change the tokenfilterFactory inside the analyzer + mapperService.reloadSearchAnalyzers(getInstanceFromNode(AnalysisRegistry.class)); + IndexAnalyzers updatedAnalyzers = mapperService.getIndexAnalyzers(); + assertSame(current, updatedAnalyzers); + assertSame(current.getDefaultIndexAnalyzer(), updatedAnalyzers.getDefaultIndexAnalyzer()); + assertSame(current.getDefaultSearchAnalyzer(), updatedAnalyzers.getDefaultSearchAnalyzer()); + assertSame(current.getDefaultSearchQuoteAnalyzer(), updatedAnalyzers.getDefaultSearchQuoteAnalyzer()); + + assertFalse(assertSameContainedFilters(originalTokenFilters, current.get("reloadableAnalyzer"))); + assertFalse(assertSameContainedFilters(originalTokenFilters, mapperService.fullName("field").searchAnalyzer())); + assertFalse(assertSameContainedFilters(originalTokenFilters, mapperService.fullName("otherField").searchQuoteAnalyzer())); + } + + private boolean assertSameContainedFilters(TokenFilterFactory[] originalTokenFilter, NamedAnalyzer updatedAnalyzer) { + ReloadableCustomAnalyzer updatedReloadableAnalyzer = (ReloadableCustomAnalyzer) updatedAnalyzer.analyzer(); + TokenFilterFactory[] newTokenFilters = updatedReloadableAnalyzer.getComponents().getTokenFilters(); + assertEquals(originalTokenFilter.length, newTokenFilters.length); + int i = 0; + for (TokenFilterFactory tf : newTokenFilters ) { + assertEquals(originalTokenFilter[i].name(), tf.name()); + if (originalTokenFilter[i] != tf) { + return false; + } + i++; + } + return true; + } + + public static final class ReloadableFilterPlugin extends Plugin implements AnalysisPlugin { + + @Override + public Map> getTokenFilters() { + return Collections.singletonMap("myReloadableFilter", new AnalysisProvider() { + + @Override + public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) + throws IOException { + return new TokenFilterFactory() { + + @Override + public String name() { + return "myReloadableFilter"; + } + + 
@Override + public TokenStream create(TokenStream tokenStream) { + return tokenStream; + } + + @Override + public AnalysisMode getAnalysisMode() { + return AnalysisMode.SEARCH_TIME; + } + }; + } + }); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/AbstractBroadcastResponseTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java similarity index 98% rename from server/src/test/java/org/elasticsearch/action/support/broadcast/AbstractBroadcastResponseTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java index 5bf48fa589764..87084577baaef 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/AbstractBroadcastResponseTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java @@ -17,10 +17,11 @@ * under the License. */ -package org.elasticsearch.action.support.broadcast; +package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 50cfcd5abaff7..4a7ab36c802b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -56,7 +56,9 @@ import org.elasticsearch.snapshots.SourceOnlySnapshotRepository; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction; import org.elasticsearch.xpack.core.action.TransportFreezeIndexAction; +import org.elasticsearch.xpack.core.action.TransportReloadAnalyzersAction; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoAction; @@ -64,6 +66,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.rest.action.RestFreezeIndexAction; +import org.elasticsearch.xpack.core.rest.action.RestReloadAnalyzersAction; import org.elasticsearch.xpack.core.rest.action.RestXPackInfoAction; import org.elasticsearch.xpack.core.rest.action.RestXPackUsageAction; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; @@ -249,6 +252,7 @@ public Collection<Object> createComponents(Client client, ClusterService cluster actions.add(new ActionHandler<>(TransportFreezeIndexAction.FreezeIndexAction.INSTANCE, TransportFreezeIndexAction.class)); actions.addAll(licensing.getActions()); + actions.add(new ActionHandler<>(ReloadAnalyzerAction.INSTANCE, TransportReloadAnalyzersAction.class)); return actions; } @@ -285,6 +289,7 @@ public List<RestHandler> getRestHandlers(Settings settings, RestController restC handlers.add(new RestXPackInfoAction(settings, restController)); handlers.add(new RestXPackUsageAction(settings, restController)); handlers.add(new RestFreezeIndexAction(settings, restController)); + handlers.add(new 
RestReloadAnalyzersAction(settings, restController)); handlers.addAll(licensing.getRestHandlers(settings, restController, clusterSettings, indexScopedSettings, settingsFilter, indexNameExpressionResolver, nodesInCluster)); return handlers; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java new file mode 100644 index 0000000000000..f37df1ec820eb --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.Action; + +public class ReloadAnalyzerAction extends Action<ReloadAnalyzersResponse> { + + public static final ReloadAnalyzerAction INSTANCE = new ReloadAnalyzerAction(); + public static final String NAME = "indices:admin/reload_analyzers"; + + private ReloadAnalyzerAction() { + super(NAME); + } + + @Override + public ReloadAnalyzersResponse newResponse() { + return new ReloadAnalyzersResponse(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersRequest.java new file mode 100644 index 0000000000000..8721abd3403a7 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersRequest.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.support.broadcast.BroadcastRequest; + +import java.util.Arrays; +import java.util.Objects; + +/** + * Request for reloading index search analyzers + */ +public class ReloadAnalyzersRequest extends BroadcastRequest<ReloadAnalyzersRequest> { + + /** + * Constructs a new request for reloading index search analyzers for one or more indices + */ + public ReloadAnalyzersRequest(String... indices) { + super(indices); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReloadAnalyzersRequest that = (ReloadAnalyzersRequest) o; + return Objects.equals(indicesOptions(), that.indicesOptions()) + && Arrays.equals(indices, that.indices); + } + + @Override + public int hashCode() { + return Objects.hash(indicesOptions(), Arrays.hashCode(indices)); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java new file mode 100644 index 0000000000000..263dcf7debd3b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * The response object that will be returned when reloading analyzers + */ +public class ReloadAnalyzersResponse extends BroadcastResponse { + + private final Map<String, List<String>> reloadedIndicesNodes; + + public ReloadAnalyzersResponse() { + reloadedIndicesNodes = Collections.emptyMap(); + } + + public ReloadAnalyzersResponse(int totalShards, int successfulShards, int failedShards, + List<DefaultShardOperationFailedException> shardFailures, Map<String, List<String>> reloadedIndicesNodes) { + super(totalShards, successfulShards, failedShards, shardFailures); + this.reloadedIndicesNodes = reloadedIndicesNodes; + } + + /** + * Override in subclass to add custom fields following the common `_shards` field + */ + @Override + protected void addCustomXContentFields(XContentBuilder builder, Params params) throws IOException { + builder.startArray("reloaded_nodes"); + for (Entry<String, List<String>> indexNodesReloaded : reloadedIndicesNodes.entrySet()) { + builder.startObject(); + builder.field("index", indexNodesReloaded.getKey()); + builder.field("reloaded_node_ids", indexNodesReloaded.getValue()); + builder.endObject(); + } + builder.endArray(); + } + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser<ReloadAnalyzersResponse, Void> PARSER = new ConstructingObjectParser<>("reload_analyzer", + true, arg -> { + BroadcastResponse response = (BroadcastResponse) arg[0]; + List<Tuple<String, List<String>>> results = (List<Tuple<String, List<String>>>) arg[1]; + Map<String, List<String>> reloadedNodeIds = new HashMap<>(); + for (Tuple<String, List<String>> result : results) { + reloadedNodeIds.put(result.v1(), result.v2()); + } + return new ReloadAnalyzersResponse(response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), + Arrays.asList(response.getShardFailures()), reloadedNodeIds); + }); + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser<Tuple<String, List<String>>, Void> ENTRY_PARSER = new ConstructingObjectParser<>( + "reload_analyzer.entry", true, arg -> { + String index = (String) arg[0]; + List<String> nodeIds = (List<String>) arg[1]; + return new Tuple<>(index, nodeIds); + }); + + static { + declareBroadcastFields(PARSER); + PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, new ParseField("reloaded_nodes")); + ENTRY_PARSER.declareString(constructorArg(), new ParseField("index")); + ENTRY_PARSER.declareStringArray(constructorArg(), new ParseField("reloaded_node_ids")); + } + + public static ReloadAnalyzersResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportReloadAnalyzersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportReloadAnalyzersAction.java new file mode 100644 index 0000000000000..d9c0b6f243d09 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportReloadAnalyzersAction.java 
@@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.PlainShardsIterator; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.TransportReloadAnalyzersAction.ReloadResult; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + + +/** + * Transport action that reloads the search-time analyzers of one or more indices. 
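+ * The request fans out to one shard copy per node for each target index, so every node holding a copy of the index reloads its analyzers exactly once.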
+ */ +public class TransportReloadAnalyzersAction + extends TransportBroadcastByNodeAction<ReloadAnalyzersRequest, ReloadAnalyzersResponse, ReloadResult> { + + private static final Logger logger = LogManager.getLogger(TransportReloadAnalyzersAction.class); + private final IndicesService indicesService; + + @Inject + public TransportReloadAnalyzersAction(ClusterService clusterService, TransportService transportService, IndicesService indicesService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + super(ReloadAnalyzerAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, + ReloadAnalyzersRequest::new, ThreadPool.Names.MANAGEMENT, false); + this.indicesService = indicesService; + } + + @Override + protected ReloadResult readShardResult(StreamInput in) throws IOException { + ReloadResult reloadResult = new ReloadResult(); + reloadResult.readFrom(in); + return reloadResult; + } + + @Override + protected ReloadAnalyzersResponse newResponse(ReloadAnalyzersRequest request, int totalShards, int successfulShards, int failedShards, + List<ReloadResult> responses, List<DefaultShardOperationFailedException> shardFailures, ClusterState clusterState) { + Map<String, List<String>> reloadedIndicesNodes = new HashMap<String, List<String>>(); + for (ReloadResult result : responses) { + if (reloadedIndicesNodes.containsKey(result.index)) { + List<String> nodes = reloadedIndicesNodes.get(result.index); + nodes.add(result.nodeId); + } else { + List<String> nodes = new ArrayList<>(); + nodes.add(result.nodeId); + reloadedIndicesNodes.put(result.index, nodes); + } + } + return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, shardFailures, reloadedIndicesNodes); + } + + @Override + protected ReloadAnalyzersRequest readRequestFrom(StreamInput in) throws IOException { + final ReloadAnalyzersRequest request = new ReloadAnalyzersRequest(); + request.readFrom(in); + return request; + } + + @Override + protected ReloadResult shardOperation(ReloadAnalyzersRequest request, ShardRouting shardRouting) throws IOException { + logger.info("reloading analyzers for index shard " + shardRouting); + IndexService indexService = indicesService.indexService(shardRouting.index()); + indexService.mapperService().reloadSearchAnalyzers(indicesService.getAnalysis()); + return new ReloadResult(shardRouting.index().getName(), shardRouting.currentNodeId()); + } + + public static final class ReloadResult implements Streamable { + String index; + String nodeId; + + private ReloadResult(String index, String nodeId) { + this.index = index; + this.nodeId = nodeId; + } + + private ReloadResult() { + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.index = in.readString(); + this.nodeId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(index); + out.writeString(nodeId); + } + } + + /** + * The reload request should go to only one shard per node the index lives on + */ + @Override + protected ShardsIterator shards(ClusterState clusterState, ReloadAnalyzersRequest request, String[] concreteIndices) { + RoutingTable routingTable = clusterState.routingTable(); + List<ShardRouting> shards = new ArrayList<>(); + for (String index : concreteIndices) { + Set<String> nodesCovered = new HashSet<>(); + IndexRoutingTable indexRoutingTable = routingTable.index(index); + for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { + for (ShardRouting shardRouting : indexShardRoutingTable) { + if (nodesCovered.contains(shardRouting.currentNodeId()) == false) { + shards.add(shardRouting); + nodesCovered.add(shardRouting.currentNodeId()); + } 
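+                        // remaining shard copies on an already-covered node fall through here and are skipped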
+ } + } + } + return new PlainShardsIterator(shards); + } + + @Override + protected ClusterBlockException checkGlobalBlock(ClusterState state, ReloadAnalyzersRequest request) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, ReloadAnalyzersRequest request, String[] concreteIndices) { + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java new file mode 100644 index 0000000000000..3b379e8cebbb5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.rest.action; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction; +import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest; + +import java.io.IOException; +public class RestReloadAnalyzersAction extends BaseRestHandler { + + public RestReloadAnalyzersAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/{index}/_reload_search_analyzers", this); + controller.registerHandler(RestRequest.Method.POST, "/{index}/_reload_search_analyzers", this); + } + + @Override + public String getName() { + return "reload_search_analyzers_action"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + ReloadAnalyzersRequest reloadAnalyzersRequest = new ReloadAnalyzersRequest( + Strings.splitStringByCommaToArray(request.param("index"))); + reloadAnalyzersRequest.indicesOptions(IndicesOptions.fromRequest(request, reloadAnalyzersRequest.indicesOptions())); + return channel -> client.execute(ReloadAnalyzerAction.INSTANCE, reloadAnalyzersRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponseTests.java new file mode 100644 index 0000000000000..cf1ad5909ba6e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponseTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; +import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase<ReloadAnalyzersResponse> { + + @Override + protected ReloadAnalyzersResponse createTestInstance(int totalShards, int successfulShards, int failedShards, + List<DefaultShardOperationFailedException> failures) { + Map<String, List<String>> reloadedIndicesNodes = new HashMap<>(); + int randomIndices = randomIntBetween(0, 5); + for (int i = 0; i < randomIndices; i++) { + List<String> randomNodeIds = Arrays.asList(generateRandomStringArray(5, 5, false, true)); + reloadedIndicesNodes.put(randomAlphaOfLengthBetween(5, 10), randomNodeIds); + } + return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, failures, reloadedIndicesNodes); + } + + @Override + protected ReloadAnalyzersResponse doParseInstance(XContentParser parser) throws IOException { + return ReloadAnalyzersResponse.fromXContent(parser); + } + + @Override + public void testToXContent() { + Map<String, List<String>> reloadedIndicesNodes = Collections.singletonMap("index", Collections.singletonList("nodeId")); + ReloadAnalyzersResponse response = new ReloadAnalyzersResponse(10, 5, 5, null, reloadedIndicesNodes); + String output = Strings.toString(response); + assertEquals( + "{\"_shards\":{\"total\":10,\"successful\":5,\"failed\":5}," + + "\"reloaded_nodes\":[{\"index\":\"index\",\"reloaded_node_ids\":[\"nodeId\"]}]" + + "}", + output); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadSynonymAnalyzerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadSynonymAnalyzerTests.java new file mode 100644 index 0000000000000..e0e8de3d23def --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/ReloadSynonymAnalyzerTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.Response; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction; +import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class ReloadSynonymAnalyzerTests extends ESSingleNodeTestCase { + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Arrays.asList(LocalStateCompositeXPackPlugin.class, CommonAnalysisPlugin.class); + } + + public void testSynonymsUpdateable() throws FileNotFoundException, IOException { + String synonymsFileName = "synonyms.txt"; + Path configDir = node().getEnvironment().configFile(); + if (Files.exists(configDir) == false) { + Files.createDirectory(configDir); + } + Path synonymsFile = configDir.resolve(synonymsFileName); + if (Files.exists(synonymsFile) == false) { + Files.createFile(synonymsFile); + } + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz"); + } + + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 5) + .put("index.number_of_replicas", 0) + .put("analysis.analyzer.my_synonym_analyzer.tokenizer", "standard") + .putList("analysis.analyzer.my_synonym_analyzer.filter", "lowercase", "my_synonym_filter") + .put("analysis.filter.my_synonym_filter.type", "synonym") + .put("analysis.filter.my_synonym_filter.updateable", "true") + .put("analysis.filter.my_synonym_filter.synonyms_path", synonymsFileName)) + .addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=my_synonym_analyzer")); + + client().prepareIndex("test", "_doc", "1").setSource("field", "Foo").get(); + assertNoFailures(client().admin().indices().prepareRefresh("test").execute().actionGet()); + + SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 0L); + Response analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(2, analyzeResponse.getTokens().size()); + assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm()); + 
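+        // the initial synonyms file maps "foo" to "baz", so analyzing "foo" emits the synonym as a second token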
assertEquals("baz", analyzeResponse.getTokens().get(1).getTerm()); + + // now update synonyms file and trigger reloading + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz, buzz"); + } + assertNoFailures(client().execute(ReloadAnalyzerAction.INSTANCE, new ReloadAnalyzersRequest("test")).actionGet()); + + analyzeResponse = client().admin().indices().prepareAnalyze("test", "Foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(3, analyzeResponse.getTokens().size()); + Set tokens = new HashSet<>(); + analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); + assertTrue(tokens.contains("foo")); + assertTrue(tokens.contains("baz")); + assertTrue(tokens.contains("buzz")); + + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 1L); + } +} \ No newline at end of file diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rest/action/ReloadSynonymAnalyzerIT.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rest/action/ReloadSynonymAnalyzerIT.java new file mode 100644 index 0000000000000..790fefb74372d --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rest/action/ReloadSynonymAnalyzerIT.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.rest.action; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.Response; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction; +import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest; +import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class ReloadSynonymAnalyzerIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Arrays.asList(LocalStateCompositeXPackPlugin.class, CommonAnalysisPlugin.class); + } + + /** + * This test needs to write to the config directory, this is difficult in an external cluster so we overwrite this to force running with + * {@link InternalTestCluster} + */ + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + public void testSynonymsUpdateable() throws FileNotFoundException, IOException, InterruptedException { + Path config = internalCluster().getInstance(Environment.class).configFile(); + String synonymsFileName = "synonyms.txt"; + Path synonymsFile = config.resolve(synonymsFileName); + Files.createFile(synonymsFile); + assertTrue(Files.exists(synonymsFile)); + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.CREATE), StandardCharsets.UTF_8))) { + out.println("foo, baz"); + } + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", cluster().numDataNodes() * 2) + .put("index.number_of_replicas", 1) + .put("analysis.analyzer.my_synonym_analyzer.tokenizer", "standard") + .put("analysis.analyzer.my_synonym_analyzer.filter", "my_synonym_filter") + .put("analysis.filter.my_synonym_filter.type", "synonym") + .put("analysis.filter.my_synonym_filter.updateable", "true") + .put("analysis.filter.my_synonym_filter.synonyms_path", synonymsFileName)) + .addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=my_synonym_analyzer")); + + client().prepareIndex("test", "_doc", "1").setSource("field", "foo").get(); + 
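+        // refresh so the document just indexed is visible to the searches below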
assertNoFailures(client().admin().indices().prepareRefresh("test").execute().actionGet()); + + SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 0L); + Response analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(2, analyzeResponse.getTokens().size()); + assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm()); + assertEquals("baz", analyzeResponse.getTokens().get(1).getTerm()); + + // now update synonyms file several times and trigger reloading + for (int i = 0; i < 10; i++) { + String testTerm = randomAlphaOfLength(10); + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz, " + testTerm); + } + ReloadAnalyzersResponse reloadResponse = client().execute(ReloadAnalyzerAction.INSTANCE, new ReloadAnalyzersRequest("test")) + .actionGet(); + assertNoFailures(reloadResponse); + assertEquals(cluster().numDataNodes(), reloadResponse.getSuccessfulShards()); + + analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(3, analyzeResponse.getTokens().size()); + Set<String> tokens = new HashSet<>(); + analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); + assertTrue(tokens.contains("foo")); + assertTrue(tokens.contains("baz")); + assertTrue(tokens.contains(testTerm)); + + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)).get(); + assertHitCount(response, 1L); + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/indices.reload_search_analyzers.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/indices.reload_search_analyzers.json new file mode 100644 index 0000000000000..bd79dbf4718f5 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/indices.reload_search_analyzers.json @@ -0,0 +1,33 @@ +{ + "indices.reload_search_analyzers": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-reload-analyzers.html", + "stability": "experimental", + "methods": ["GET", "POST"], + "url": { + "paths": ["/{index}/_reload_search_analyzers"], + "parts": { + "index": { + "type": "list", + "description" : "A comma-separated list of index names to reload analyzers for" + } + }, + "params": { + "ignore_unavailable": { + "type" : "boolean", + "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type" : "boolean", + "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "expand_wildcards": { + "type" : "enum", + "options" : ["open","closed","none","all"], + "default" : "open", + "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
+ } + } + }, + "body": null + } +} From 66e1853f34081cd00e4c4bbef37adfae11cb5196 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 27 Jun 2019 09:42:47 -0700 Subject: [PATCH 043/140] [DOCS] Adds anchors and attributes to ML APIs --- .../apis/delete-transform.asciidoc | 2 +- .../data-frames/apis/get-transform.asciidoc | 2 +- .../apis/preview-transform.asciidoc | 2 +- .../data-frames/apis/put-transform.asciidoc | 2 +- .../data-frames/apis/start-transform.asciidoc | 2 +- .../data-frames/apis/stop-transform.asciidoc | 2 +- docs/reference/ml/apis/close-job.asciidoc | 28 ++++++++-------- .../ml/apis/delete-calendar-event.asciidoc | 20 ++++++------ .../ml/apis/delete-calendar-job.asciidoc | 17 +++++----- .../ml/apis/delete-calendar.asciidoc | 22 ++++++------- .../ml/apis/delete-datafeed.asciidoc | 27 ++++++++-------- .../ml/apis/delete-expired-data.asciidoc | 13 +++++--- docs/reference/ml/apis/delete-filter.asciidoc | 20 ++++++------ .../ml/apis/delete-forecast.asciidoc | 21 ++++++------ docs/reference/ml/apis/delete-job.asciidoc | 21 ++++++------ .../ml/apis/delete-snapshot.asciidoc | 19 +++++------ .../ml/apis/find-file-structure.asciidoc | 19 ++++++----- docs/reference/ml/apis/flush-job.asciidoc | 24 +++++++------- docs/reference/ml/apis/forecast.asciidoc | 23 +++++++------ docs/reference/ml/apis/get-bucket.asciidoc | 32 +++++++++---------- .../ml/apis/get-calendar-event.asciidoc | 27 +++++++++------- docs/reference/ml/apis/get-calendar.asciidoc | 30 ++++++++--------- docs/reference/ml/apis/get-category.asciidoc | 32 ++++++++++--------- .../ml/apis/get-datafeed-stats.asciidoc | 27 ++++++++-------- docs/reference/ml/apis/get-datafeed.asciidoc | 25 ++++++++------- docs/reference/ml/apis/get-filter.asciidoc | 30 ++++++++--------- .../reference/ml/apis/get-influencer.asciidoc | 29 ++++++++--------- docs/reference/ml/apis/get-job-stats.asciidoc | 28 ++++++++-------- docs/reference/ml/apis/get-job.asciidoc | 25 ++++++++------- docs/reference/ml/apis/get-ml-info.asciidoc | 18 ++++++----- .../ml/apis/get-overall-buckets.asciidoc | 28 +++++++++------- docs/reference/ml/apis/get-record.asciidoc | 30 ++++++++--------- docs/reference/ml/apis/get-snapshot.asciidoc | 27 ++++++++-------- docs/reference/ml/apis/open-job.asciidoc | 26 +++++++-------- .../ml/apis/post-calendar-event.asciidoc | 26 ++++++++------- docs/reference/ml/apis/post-data.asciidoc | 31 +++++++++--------- .../ml/apis/preview-datafeed.asciidoc | 24 +++++++------- .../ml/apis/put-calendar-job.asciidoc | 16 ++++++---- docs/reference/ml/apis/put-calendar.asciidoc | 25 ++++++++------- docs/reference/ml/apis/put-datafeed.asciidoc | 28 ++++++++-------- docs/reference/ml/apis/put-filter.asciidoc | 23 +++++++------ docs/reference/ml/apis/put-job.asciidoc | 23 +++++++------ .../ml/apis/revert-snapshot.asciidoc | 24 +++++++------- .../ml/apis/set-upgrade-mode.asciidoc | 17 ++++++---- .../reference/ml/apis/start-datafeed.asciidoc | 25 ++++++++------- docs/reference/ml/apis/stop-datafeed.asciidoc | 25 ++++++++------- .../ml/apis/update-datafeed.asciidoc | 24 ++++++++------ docs/reference/ml/apis/update-filter.asciidoc | 20 ++++++------ docs/reference/ml/apis/update-job.asciidoc | 19 ++++++----- .../ml/apis/update-snapshot.asciidoc | 23 +++++++------ .../ml/apis/validate-detector.asciidoc | 20 ++++++------ docs/reference/ml/apis/validate-job.asciidoc | 20 ++++++------ 52 files changed, 585 insertions(+), 528 deletions(-) diff --git a/docs/reference/data-frames/apis/delete-transform.asciidoc b/docs/reference/data-frames/apis/delete-transform.asciidoc index 
7cc911e91acb1..23c70d914f08f 100644 --- a/docs/reference/data-frames/apis/delete-transform.asciidoc +++ b/docs/reference/data-frames/apis/delete-transform.asciidoc @@ -43,7 +43,7 @@ NOTE: Before you can delete the {dataframe-transform}, you must stop it. [discrete] [[delete-data-frame-transform-examples]] -==== {api-example-title} +==== {api-examples-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/get-transform.asciidoc b/docs/reference/data-frames/apis/get-transform.asciidoc index 9dd0ff092d754..847d764c01267 100644 --- a/docs/reference/data-frames/apis/get-transform.asciidoc +++ b/docs/reference/data-frames/apis/get-transform.asciidoc @@ -73,7 +73,7 @@ see {stack-ov}/security-privileges.html[Security privileges] and [discrete] [[get-data-frame-transform-example]] -==== {api-example-title} +==== {api-examples-title} The following example retrieves information about a maximum of ten transforms: diff --git a/docs/reference/data-frames/apis/preview-transform.asciidoc b/docs/reference/data-frames/apis/preview-transform.asciidoc index d4f2a9e6a12da..4e11fd5eda231 100644 --- a/docs/reference/data-frames/apis/preview-transform.asciidoc +++ b/docs/reference/data-frames/apis/preview-transform.asciidoc @@ -42,7 +42,7 @@ If the {es} {security-features} are enabled, you must have reduce the data. See <>. [discrete] -==== {api-example-title} +==== {api-examples-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/put-transform.asciidoc b/docs/reference/data-frames/apis/put-transform.asciidoc index a24cc7d224531..3c6a5251bffd8 100644 --- a/docs/reference/data-frames/apis/put-transform.asciidoc +++ b/docs/reference/data-frames/apis/put-transform.asciidoc @@ -71,7 +71,7 @@ IMPORTANT: You must use {kib} or this API to create a {dataframe-transform}. [discrete] [[put-data-frame-transform-example]] -==== {api-example-title} +==== {api-examples-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/start-transform.asciidoc b/docs/reference/data-frames/apis/start-transform.asciidoc index 059f8a634146f..e7ae0353f5ca6 100644 --- a/docs/reference/data-frames/apis/start-transform.asciidoc +++ b/docs/reference/data-frames/apis/start-transform.asciidoc @@ -40,7 +40,7 @@ have `view_index_metadata` privileges on the source index for the [discrete] [[start-data-frame-transform-example]] -==== {api-example-title} +==== {api-examples-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/data-frames/apis/stop-transform.asciidoc b/docs/reference/data-frames/apis/stop-transform.asciidoc index 4ade2706ab2b6..9a08aaf0a9b4d 100644 --- a/docs/reference/data-frames/apis/stop-transform.asciidoc +++ b/docs/reference/data-frames/apis/stop-transform.asciidoc @@ -74,7 +74,7 @@ All {dataframe-transforms} can be stopped by using `_all` or `*` as the [discrete] [[stop-data-frame-transform-example]] -==== {api-example-title} +==== {api-examples-title} [source,js] -------------------------------------------------- diff --git a/docs/reference/ml/apis/close-job.asciidoc b/docs/reference/ml/apis/close-job.asciidoc index c5f9b5fc2444a..fa96b18777d42 100644 --- a/docs/reference/ml/apis/close-job.asciidoc +++ b/docs/reference/ml/apis/close-job.asciidoc @@ -12,8 +12,9 @@ A job can be opened and closed multiple times throughout its lifecycle. 
A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - -==== Request +[discrete] +[[ml-close-job-request]] +==== {api-request-title} `POST _ml/anomaly_detectors/<job_id>/_close` + @@ -21,8 +22,8 @@ operations, but you can still explore and navigate results. `POST _ml/anomaly_detectors/_all/_close` + - -==== Description +[[ml-close-job-desc]] +==== {api-description-title} You can close multiple jobs in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. You can close all jobs @@ -47,15 +48,16 @@ after the close job API returns. The `force` query parameter should only be use situations where the job has already failed, or where you are not interested in results the job might have recently produced or might produce in the future. - -==== Path Parameters +[discrete] +[[ml-close-job-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. It can be a job identifier, a group name, or a wildcard expression. - -==== Query Parameters +[[ml-close-job-query-parms]] +==== {api-query-parms-title} `force`:: (boolean) Use to close a failed job, or to forcefully close a job which has not @@ -65,14 +67,14 @@ results the job might have recently produced or might produce in the future. (time units) Controls the time to wait until a job has closed. The default value is 30 minutes. - -==== Authorization +[[ml-close-job-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - +For more information, see {stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-close-job-example]] +==== {api-examples-title} The following example closes the `total-requests` job: diff --git a/docs/reference/ml/apis/delete-calendar-event.asciidoc b/docs/reference/ml/apis/delete-calendar-event.asciidoc index 68f7a0738375d..bc99398991bd6 100644 --- a/docs/reference/ml/apis/delete-calendar-event.asciidoc +++ b/docs/reference/ml/apis/delete-calendar-event.asciidoc @@ -8,19 +8,20 @@ Deletes scheduled events from a calendar. - -==== Request +[[ml-delete-calendar-event-request]] +==== {api-request-title} `DELETE _ml/calendars/<calendar_id>/events/<event_id>` - -==== Description +[[ml-delete-calendar-event-desc]] +==== {api-description-title} This API removes individual events from a calendar. To remove all scheduled events and delete the calendar, see the <<ml-delete-calendar,delete calendar API>>. -==== Path Parameters +[[ml-delete-calendar-event-path-parms]] +==== {api-path-parms-title} `calendar_id`(required):: (string) Identifier for the calendar. @@ -29,13 +30,14 @@ events and delete the calendar, see the (string) Identifier for the scheduled event. You can obtain this identifier by using the <<ml-get-calendar-event,get calendar events API>>. - -==== Authorization +[[ml-delete-calendar-event-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. +For more information, see {stack-ov}/security-privileges.html[Security privileges]. 
-==== Examples +[[ml-delete-calendar-event-example]] +==== {api-examples-title} The following example deletes a scheduled event from the `planned-outages` calendar: diff --git a/docs/reference/ml/apis/delete-calendar-job.asciidoc b/docs/reference/ml/apis/delete-calendar-job.asciidoc index 118a706d29460..9451734c2304f 100644 --- a/docs/reference/ml/apis/delete-calendar-job.asciidoc +++ b/docs/reference/ml/apis/delete-calendar-job.asciidoc @@ -8,13 +8,13 @@ Deletes jobs from a calendar. - -==== Request +[[ml-delete-calendar-job-request]] +==== {api-request-title} `DELETE _ml/calendars/<calendar_id>/jobs/<job_id>` - -==== Path Parameters +[[ml-delete-calendar-job-path-parms]] +==== {api-path-parms-title} `calendar_id`(required):: (string) Identifier for the calendar. @@ -23,13 +23,14 @@ Deletes jobs from a calendar. (string) An identifier for the job. It can be a job identifier, a group name, or a comma-separated list of jobs or groups. - -==== Authorization +[[ml-delete-calendar-job-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. +For more information, see {stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-delete-calendar-job-example]] +==== {api-examples-title} The following example removes the association between the `planned-outages` calendar and `total-requests` job: diff --git a/docs/reference/ml/apis/delete-calendar.asciidoc b/docs/reference/ml/apis/delete-calendar.asciidoc index 2707f3175e04c..c07eb37c93dfc 100644 --- a/docs/reference/ml/apis/delete-calendar.asciidoc +++ b/docs/reference/ml/apis/delete-calendar.asciidoc @@ -8,31 +8,31 @@ Deletes a calendar. - -==== Request +[[ml-delete-calendar-request]] +==== {api-request-title} `DELETE _ml/calendars/<calendar_id>` - -==== Description +[[ml-delete-calendar-desc]] +==== {api-description-title} This API removes all scheduled events from the calendar then deletes the calendar. - -==== Path Parameters +[[ml-delete-calendar-path-parms]] +==== {api-path-parms-title} `calendar_id` (required):: (string) Identifier for the calendar. - -==== Authorization +[[ml-delete-calendar-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - +For more information, see {stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-delete-calendar-example]] +==== {api-examples-title} The following example deletes the `planned-outages` calendar: diff --git a/docs/reference/ml/apis/delete-datafeed.asciidoc b/docs/reference/ml/apis/delete-datafeed.asciidoc index d2a7845be7584..9686959427daa 100644 --- a/docs/reference/ml/apis/delete-datafeed.asciidoc +++ b/docs/reference/ml/apis/delete-datafeed.asciidoc @@ -10,38 +10,37 @@ Deletes an existing {dfeed}. - -==== Request +[[ml-delete-datafeed-request]] +==== {api-request-title} `DELETE _ml/datafeeds/<feed_id>` - -==== Description +[[ml-delete-datafeed-desc]] +==== {api-description-title} NOTE: Unless the `force` parameter is used, the {dfeed} must be stopped before it can be deleted. 
- -==== Path Parameters +[[ml-delete-datafeed-path-parms]] +==== {api-path-parms-title} `feed_id` (required):: (string) Identifier for the {dfeed} - -===== Query Parameters +[[ml-delete-datafeed-query-parms]] +==== {api-query-parms-title} `force`:: (boolean) Use to forcefully delete a started {dfeed}; this method is quicker than stopping and deleting the {dfeed}. - -===== Authorization +[[ml-delete-datafeed-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. - +For more information, see {stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-delete-datafeed-example]] +==== {api-examples-title} The following example deletes the `datafeed-total-requests` {dfeed}: diff --git a/docs/reference/ml/apis/delete-expired-data.asciidoc b/docs/reference/ml/apis/delete-expired-data.asciidoc index 8814a1686736e..56ca1871329ee 100644 --- a/docs/reference/ml/apis/delete-expired-data.asciidoc +++ b/docs/reference/ml/apis/delete-expired-data.asciidoc @@ -8,25 +8,28 @@ Deletes expired and unused machine learning data. -==== Request +[[ml-delete-expired-data-request]] +==== {api-request-title} `DELETE _ml/_delete_expired_data` -==== Description +[[ml-delete-expired-data-desc]] +==== {api-description-title} Deletes all job results, model snapshots and forecast data that have exceeded their `retention days` period. Machine learning state documents that are not associated with any job are also deleted. -==== Authorization +[[ml-delete-expired-data-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {stack-ov}/security-privileges.html[Security Privileges] and {stack-ov}/built-in-roles.html[Built-in Roles]. - -==== Examples +[[ml-delete-expired-data-example]] +==== {api-examples-title} The endpoint takes no arguments: diff --git a/docs/reference/ml/apis/delete-filter.asciidoc b/docs/reference/ml/apis/delete-filter.asciidoc index b2cbb7ef2832d..8d6797448ec96 100644 --- a/docs/reference/ml/apis/delete-filter.asciidoc +++ b/docs/reference/ml/apis/delete-filter.asciidoc @@ -8,32 +8,32 @@ Deletes a filter. - -==== Request +[[ml-delete-filter-request]] +==== {api-request-title} `DELETE _ml/filters/<filter_id>` - -==== Description +[[ml-delete-filter-desc]] +==== {api-description-title} This API deletes a {stack-ov}/ml-rules.html[filter]. If a {ml} job references the filter, you cannot delete the filter. You must update or delete the job before you can delete the filter. - -==== Path Parameters +[[ml-delete-filter-path-parms]] +==== {api-path-parms-title} `filter_id` (required):: (string) Identifier for the filter. - -==== Authorization +[[ml-delete-filter-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - -==== Examples +[[ml-delete-filter-example]] +==== {api-examples-title} The following example deletes the `safe_domains` filter: diff --git a/docs/reference/ml/apis/delete-forecast.asciidoc b/docs/reference/ml/apis/delete-forecast.asciidoc index 133b9105e478a..8332d07f84041 100644 --- a/docs/reference/ml/apis/delete-forecast.asciidoc +++ b/docs/reference/ml/apis/delete-forecast.asciidoc @@ -8,7 +8,8 @@ Deletes forecasts from a {ml} job. 
-==== Request +[[ml-delete-forecast-request]] +==== {api-request-title} `DELETE _ml/anomaly_detectors/<job_id>/_forecast` + @@ -16,8 +17,8 @@ Deletes forecasts from a {ml} job. `DELETE _ml/anomaly_detectors/<job_id>/_forecast/_all` - -==== Description +[[ml-delete-forecast-desc]] +==== {api-description-title} By default, forecasts are retained for 14 days. You can specify a different retention period with the `expires_in` parameter in the <<ml-forecast,forecast jobs API>>. The delete forecast API enables you to delete one or more forecasts before they expire. @@ -26,8 +27,8 @@ NOTE: When you delete a job its associated forecasts are deleted. For more information, see {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the Future]. - -==== Path Parameters +[[ml-delete-forecast-path-parms]] +==== {api-path-parms-title} `job_id` (required):: (string) Identifier for the job. @@ -37,7 +38,8 @@ For more information, see {stack-ov}/ml-overview.html#ml-forecasting[Forecasting If you do not specify this optional parameter or if you specify `_all`, the API deletes all forecasts from the job. -==== Request Parameters +[[ml-delete-forecast-request-body]] +==== {api-request-body-title} `allow_no_forecasts`:: (boolean) Specifies whether an error occurs when there are no forecasts. In @@ -51,13 +53,14 @@ For more information, see {stack-ov}/ml-overview.html#ml-forecasting[Forecasting an error. The default value is `30s`. For more information about time units, see <<time-units>>. - -==== Authorization +[[ml-delete-forecast-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-delete-forecast-example]] +==== {api-examples-title} The following example deletes all forecasts from the `total-requests` job: diff --git a/docs/reference/ml/apis/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc index a52c434f93cfe..94042ba356559 100644 --- a/docs/reference/ml/apis/delete-job.asciidoc +++ b/docs/reference/ml/apis/delete-job.asciidoc @@ -8,13 +8,13 @@ Deletes an existing anomaly detection job. - -==== Request +[[ml-delete-job-request]] +==== {api-request-title} `DELETE _ml/anomaly_detectors/<job_id>` - -==== Description +[[ml-delete-job-desc]] +==== {api-description-title} All job configuration, model state and results are deleted. @@ -30,12 +30,14 @@ is used the job must be closed before it can be deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. -==== Path Parameters +[[ml-delete-job-path-parms]] +==== {api-path-parms-title} `job_id` (required):: (string) Identifier for the job -===== Query Parameters +[[ml-delete-job-query-parms]] +==== {api-query-parms-title} `force`:: (boolean) Use to forcefully delete an opened job; this method is quicker than @@ -45,14 +47,15 @@ separated list. (boolean) Specifies whether the request should return immediately or wait until the job deletion completes. Defaults to `true`. -==== Authorization +[[ml-delete-job-prereqs]] +==== {api-prereq-title} If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {stack-ov}/security-privileges.html[Security Privileges]. 
- -==== Examples +[[ml-delete-job-example]] +==== {api-examples-title} The following example deletes the `total-requests` job: diff --git a/docs/reference/ml/apis/delete-snapshot.asciidoc b/docs/reference/ml/apis/delete-snapshot.asciidoc index 18092ff8e89c1..461f7fb422756 100644 --- a/docs/reference/ml/apis/delete-snapshot.asciidoc +++ b/docs/reference/ml/apis/delete-snapshot.asciidoc @@ -8,19 +8,20 @@ Deletes an existing model snapshot. - -==== Request +[[ml-delete-snapshot-request]] +==== {api-request-title} `DELETE _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>` - -==== Description +[[ml-delete-snapshot-desc]] +==== {api-description-title} IMPORTANT: You cannot delete the active model snapshot. To delete that snapshot, first revert to a different one. To identify the active model snapshot, refer to the `model_snapshot_id` in the results from the get jobs API. -==== Path Parameters +[[ml-delete-snapshot-path-parms]] +==== {api-path-parms-title} `job_id` (required):: (string) Identifier for the job `snapshot_id` (required):: (string) Identifier for the model snapshot - -==== Authorization +[[ml-delete-snapshot-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - -==== Examples +[[ml-delete-snapshot-example]] +==== {api-examples-title} The following example deletes the `1491948163` snapshot: diff --git a/docs/reference/ml/apis/find-file-structure.asciidoc b/docs/reference/ml/apis/find-file-structure.asciidoc index f3029635bf44e..ead3087f3d86c 100644 --- a/docs/reference/ml/apis/find-file-structure.asciidoc +++ b/docs/reference/ml/apis/find-file-structure.asciidoc @@ -11,12 +11,13 @@ experimental[] Finds the structure of a text file. The text file must contain data that is suitable to be ingested into {es}. -==== Request +[[ml-find-file-structure-request]] +==== {api-request-title} `POST _ml/find_file_structure` - -==== Description +[[ml-find-file-structure-desc]] +==== {api-description-title} This API provides a starting point for ingesting data into {es} in a format that is suitable for subsequent use with other {ml} functionality. @@ -47,7 +48,8 @@ specify the `explain` query parameter. It causes an `explanation` to appear in the response, which should help in determining why the returned structure was chosen. -==== Query Parameters +[[ml-find-file-structure-query-parms]] +==== {api-query-parms-title} `charset`:: (string) The file's character set. It must be a character set that is supported @@ -197,22 +199,23 @@ format from a built-in set. -- -==== Request Body +[[ml-find-file-structure-request-body]] +==== {api-request-body-title} The text file that you want to analyze. It must contain data that is suitable to be ingested into {es}. It does not need to be in JSON format and it does not need to be UTF-8 encoded. The size is limited to the {es} HTTP receive buffer size, which defaults to 100 Mb. - -==== Authorization +[[ml-find-file-structure-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, or `monitor` cluster privileges to use this API. For more information, see {stack-ov}/security-privileges.html[Security Privileges]. [[ml-find-file-structure-examples]] -==== Examples +==== {api-examples-title} Suppose you have a newline-delimited JSON file that contains information about some books. 
You can send the contents to the `find_file_structure` endpoint:

diff --git a/docs/reference/ml/apis/flush-job.asciidoc b/docs/reference/ml/apis/flush-job.asciidoc
index e2793b2c1a174..6598f8155b931 100644
--- a/docs/reference/ml/apis/flush-job.asciidoc
+++ b/docs/reference/ml/apis/flush-job.asciidoc
@@ -8,13 +8,13 @@ Forces any buffered data to be processed by the job.

-
-==== Request
+[[ml-flush-job-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors//_flush`

-
-==== Description
+[[ml-flush-job-desc]]
+==== {api-description-title}

The flush jobs API is only applicable when sending data for analysis using the
<>. Depending on the content of the buffer, it
@@ -26,14 +26,14 @@ remains open and is available to continue analyzing data.
A close operation additionally prunes and persists the model state to disk, and the
job must be opened again before analyzing further data.

-
-==== Path Parameters
+[[ml-flush-job-path-parms]]
+==== {api-path-parms-title}

`job_id` (required)::
(string) Identifier for the job

-
-==== Query Parameters
+[[ml-flush-job-query-parms]]
+==== {api-query-parms-title}

`advance_time`::
  (string) Specifies to advance to a particular time value. Results are
@@ -56,14 +56,14 @@ opened again before analyzing further data.
  (string) When used in conjunction with `calc_interim`, specifies the range
  of buckets on which to calculate interim results.

-
-==== Authorization
+[[ml-flush-job-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].

-
-==== Examples
+[[ml-flush-job-example]]
+==== {api-examples-title}

The following example flushes the `total-requests` job:

diff --git a/docs/reference/ml/apis/forecast.asciidoc b/docs/reference/ml/apis/forecast.asciidoc
index 71a7e1db2b185..05bd250975dfb 100644
--- a/docs/reference/ml/apis/forecast.asciidoc
+++ b/docs/reference/ml/apis/forecast.asciidoc
@@ -8,14 +8,15 @@

Predicts the future behavior of a time series by using its historical behavior.

-==== Request
+[[ml-forecast-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors//_forecast`

+[[ml-forecast-desc]]
+==== {api-description-title}

-==== Description
-
-See {xpack-ref}/ml-overview.html#ml-forecasting[Forecasting the Future].
+See {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the Future].

[NOTE]
===============================
@@ -25,13 +26,14 @@ forecast. For more information about this property, see <>.
* The job must be open when you create a forecast. Otherwise, an error occurs.
===============================

-==== Path Parameters
+[[ml-forecast-path-parms]]
+==== {api-path-parms-title}

`job_id`::
  (string) Identifier for the job.

-
-==== Request Parameters
+[[ml-forecast-request-body]]
+==== {api-request-body-title}

`duration`::
  (time units) A period of time that indicates how far into the future to
@@ -45,13 +47,14 @@ forecast. For more information about this property, see <>.
  If set to a value of `0`, the forecast is never automatically deleted.
  For more information about time units, see <>.

-==== Authorization
+[[ml-forecast-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].
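As a sketch of the request body parameters described above, a forecast request that combines `duration` and `expires_in` might look like this (the job name and values are illustrative):

[source,js]
----
POST _ml/anomaly_detectors/total-requests/_forecast
{
  "duration": "10d",
  "expires_in": "30d"
}
----
// CONSOLE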
-
-==== Examples
+[[ml-forecast-example]]
+==== {api-examples-title}

The following example requests a 10-day forecast for the `total-requests` job:

diff --git a/docs/reference/ml/apis/get-bucket.asciidoc b/docs/reference/ml/apis/get-bucket.asciidoc
index 39c548dd64e8d..0e2b7988e8ead 100644
--- a/docs/reference/ml/apis/get-bucket.asciidoc
+++ b/docs/reference/ml/apis/get-bucket.asciidoc
@@ -8,21 +8,21 @@

Retrieves job results for one or more buckets.

-
-==== Request
+[[ml-get-bucket-request]]
+==== {api-request-title}

`GET _ml/anomaly_detectors//results/buckets` +

`GET _ml/anomaly_detectors//results/buckets/`

-
-==== Description
+[[ml-get-bucket-desc]]
+==== {api-description-title}

The get buckets API presents a chronological view of the records, grouped by
bucket.

-
-==== Path Parameters
+[[ml-get-bucket-path-parms]]
+==== {api-path-parms-title}

`job_id`::
  (string) Identifier for the job

@@ -32,8 +32,8 @@ bucket.
  If you do not specify this optional parameter, the API returns
  information about all buckets.

-
-==== Request Body
+[[ml-get-bucket-request-body]]
+==== {api-request-body-title}

`anomaly_score`::
  (double) Returns buckets with anomaly scores greater than or equal to this value.
@@ -64,8 +64,8 @@ bucket.

`start`::
  (string) Returns buckets with timestamps after this time.

-
-===== Results
+[[ml-get-bucket-results]]
+==== {api-response-body-title}

The API returns the following information:

@@ -73,18 +73,18 @@ The API returns the following information:
  (array) An array of bucket objects. For more information, see
  <>.

-
-==== Authorization
+[[ml-get-bucket-prereqs]]
+==== {api-prereq-title}

You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster
privileges to use this API. You also need `read` index privilege on the index
that stores the results. The `machine_learning_admin` and `machine_learning_user`
roles provide these privileges. For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges] and
-{xpack-ref}/built-in-roles.html[Built-in Roles].
-
+{stack-ov}/security-privileges.html[Security Privileges] and
+{stack-ov}/built-in-roles.html[Built-in Roles].

-==== Examples
+[[ml-get-bucket-example]]
+==== {api-examples-title}

The following example gets bucket information for the `it-ops-kpi` job:

diff --git a/docs/reference/ml/apis/get-calendar-event.asciidoc b/docs/reference/ml/apis/get-calendar-event.asciidoc
index a890f67db0d23..1ee94eff7b5c6 100644
--- a/docs/reference/ml/apis/get-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/get-calendar-event.asciidoc
@@ -9,25 +9,27 @@

Retrieves information about the scheduled events in calendars.

-
-==== Request
+[[ml-get-calendar-event-request]]
+==== {api-request-title}

`GET _ml/calendars//events` +

`GET _ml/calendars/_all/events`

-
-===== Description
+[[ml-get-calendar-event-desc]]
+==== {api-description-title}

You can get scheduled event information for a single calendar or for all
calendars by using `_all`.

-==== Path Parameters
+[[ml-get-calendar-event-path-parms]]
+==== {api-path-parms-title}

`calendar_id` (required)::
  (string) Identifier for the calendar.

-==== Request Body
+[[ml-get-calendar-event-request-body]]
+==== {api-request-body-title}

`end`::
  (string) Specifies to get events with timestamps earlier than this time.
@@ -41,7 +43,8 @@ calendars by using `_all`.

`start`::
  (string) Specifies to get events with timestamps after this time.
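A minimal request using the `start` parameter described above might look like the following sketch (the calendar name and the epoch-millisecond timestamp are illustrative):

[source,js]
----
GET _ml/calendars/planned-outages/events
{
  "start": "1513209600000"
}
----
// CONSOLE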
-==== Results +[[ml-get-calendar-event-results]] +==== {api-response-body-title} The API returns the following information: @@ -49,15 +52,15 @@ The API returns the following information: (array) An array of scheduled event resources. For more information, see <>. - -==== Authorization +[[ml-get-calendar-event-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-calendar-event-example]] +==== {api-examples-title} The following example gets information about the scheduled events in the `planned-outages` calendar: diff --git a/docs/reference/ml/apis/get-calendar.asciidoc b/docs/reference/ml/apis/get-calendar.asciidoc index 09e429b1f6de3..1ff9f8442c28e 100644 --- a/docs/reference/ml/apis/get-calendar.asciidoc +++ b/docs/reference/ml/apis/get-calendar.asciidoc @@ -8,27 +8,27 @@ Retrieves configuration information for calendars. - -==== Request +[[ml-get-calendar-request]] +==== {api-request-title} `GET _ml/calendars/` + `GET _ml/calendars/_all` - -===== Description +[[ml-get-calendar-desc]] +==== {api-description-title} You can get information for a single calendar or for all calendars by using `_all`. - -==== Path Parameters +[[ml-get-calendar-path-parms]] +==== {api-path-parms-title} `calendar_id`:: (string) Identifier for the calendar. - -==== Request Body +[[ml-get-calendar-request-body]] +==== {api-request-body-title} `page`:: `from`::: @@ -37,8 +37,8 @@ You can get information for a single calendar or for all calendars by using `size`::: (integer) Specifies the maximum number of calendars to obtain. - -==== Results +[[ml-get-calendar-results]] +==== {api-response-body-title} The API returns the following information: @@ -46,15 +46,15 @@ The API returns the following information: (array) An array of calendar resources. For more information, see <>. - -==== Authorization +[[ml-get-calendar-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-calendar-example]] +==== {api-examples-title} The following example gets configuration information for the `planned-outages` calendar: diff --git a/docs/reference/ml/apis/get-category.asciidoc b/docs/reference/ml/apis/get-category.asciidoc index 1fbfda20eccc0..252f59c3ef205 100644 --- a/docs/reference/ml/apis/get-category.asciidoc +++ b/docs/reference/ml/apis/get-category.asciidoc @@ -8,19 +8,21 @@ Retrieves job results for one or more categories. - -==== Request +[[ml-get-category-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//results/categories` + `GET _ml/anomaly_detectors//results/categories/` -==== Description +[[ml-get-category-desc]] +==== {api-description-title} For more information about categories, see -{xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages]. +{stack-ov}/ml-configuring-categories.html[Categorizing Log Messages]. -==== Path Parameters +[[ml-get-category-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. @@ -29,8 +31,8 @@ For more information about categories, see (long) Identifier for the category. 
If you do not specify this optional parameter, the API returns information about all categories in the job. - -==== Request Body +[[ml-get-category-request-body]] +==== {api-request-body-title} `page`:: `from`::: @@ -38,8 +40,8 @@ For more information about categories, see `size`::: (integer) Specifies the maximum number of categories to obtain. - -==== Results +[[ml-get-category-results]] +==== {api-response-body-title} The API returns the following information: @@ -47,18 +49,18 @@ The API returns the following information: (array) An array of category objects. For more information, see <>. - -==== Authorization +[[ml-get-category-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also need `read` index privilege on the index that stores the results. The `machine_learning_admin` and `machine_learning_user` roles provide these privileges. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges] and -{xpack-ref}/built-in-roles.html[Built-in Roles]. - +{stack-ov}/security-privileges.html[Security Privileges] and +{stack-ov}/built-in-roles.html[Built-in Roles]. -==== Examples +[[ml-get-category-example]] +==== {api-examples-title} The following example gets information about one category for the `esxi_log` job: diff --git a/docs/reference/ml/apis/get-datafeed-stats.asciidoc b/docs/reference/ml/apis/get-datafeed-stats.asciidoc index d1b842509b408..1789478e081e9 100644 --- a/docs/reference/ml/apis/get-datafeed-stats.asciidoc +++ b/docs/reference/ml/apis/get-datafeed-stats.asciidoc @@ -10,9 +10,8 @@ Retrieves usage information for {dfeeds}. - -==== Request - +[[ml-get-datafeed-stats-request]] +==== {api-request-title} `GET _ml/datafeeds//_stats` + @@ -22,9 +21,8 @@ Retrieves usage information for {dfeeds}. `GET _ml/datafeeds/_all/_stats` + - - -==== Description +[[ml-get-datafeed-stats-desc]] +==== {api-description-title} You can get statistics for multiple {dfeeds} in a single API request by using a comma-separated list of {dfeeds} or a wildcard expression. You can get @@ -36,15 +34,16 @@ If the {dfeed} is stopped, the only information you receive is the IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. -==== Path Parameters +[[ml-get-datafeed-stats-path-parms]] +==== {api-path-parms-title} `feed_id`:: (string) Identifier for the {dfeed}. It can be a {dfeed} identifier or a wildcard expression. If you do not specify one of these options, the API returns statistics for all {dfeeds}. - -==== Results +[[ml-get-datafeed-stats-results]] +==== {api-response-body-title} The API returns the following information: @@ -52,15 +51,15 @@ The API returns the following information: (array) An array of {dfeed} count objects. For more information, see <>. - -==== Authorization +[[ml-get-datafeed-stats-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. 
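For instance, a request for the statistics of a single {dfeed} might look like this sketch (the {dfeed} name is illustrative):

[source,js]
----
GET _ml/datafeeds/datafeed-total-requests/_stats
----
// CONSOLE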
-==== Examples +[[ml-get-datafeed-stats-example]] +==== {api-examples-title} The following example gets usage information for the `datafeed-total-requests` {dfeed}: diff --git a/docs/reference/ml/apis/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc index 2c9ef7e3aec74..8cb0881827747 100644 --- a/docs/reference/ml/apis/get-datafeed.asciidoc +++ b/docs/reference/ml/apis/get-datafeed.asciidoc @@ -10,8 +10,8 @@ Retrieves configuration information for {dfeeds}. -==== Request - +[[ml-get-datafeed-request]] +==== {api-request-title} `GET _ml/datafeeds/` + @@ -21,8 +21,8 @@ Retrieves configuration information for {dfeeds}. `GET _ml/datafeeds/_all` + - -===== Description +[[ml-get-datafeed-desc]] +==== {api-description-title} You can get information for multiple {dfeeds} in a single API request by using a comma-separated list of {dfeeds} or a wildcard expression. You can get @@ -31,15 +31,16 @@ information for all {dfeeds} by using `_all`, by specifying `*` as the IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. -==== Path Parameters +[[ml-get-datafeed-path-parms]] +==== {api-path-parms-title} `feed_id`:: (string) Identifier for the {dfeed}. It can be a {dfeed} identifier or a wildcard expression. If you do not specify one of these options, the API returns information about all {dfeeds}. - -==== Results +[[ml-get-datafeed-results]] +==== {api-response-body-title} The API returns the following information: @@ -47,15 +48,15 @@ The API returns the following information: (array) An array of {dfeed} objects. For more information, see <>. - -==== Authorization +[[ml-get-datafeed-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-datafeed-example]] +==== {api-examples-title} The following example gets configuration information for the `datafeed-total-requests` {dfeed}: diff --git a/docs/reference/ml/apis/get-filter.asciidoc b/docs/reference/ml/apis/get-filter.asciidoc index 53dfab2530a10..c69b717427272 100644 --- a/docs/reference/ml/apis/get-filter.asciidoc +++ b/docs/reference/ml/apis/get-filter.asciidoc @@ -8,27 +8,27 @@ Retrieves filters. - -==== Request +[[ml-get-filter-request]] +==== {api-request-title} `GET _ml/filters/` + `GET _ml/filters/` - -===== Description +[[ml-get-filter-desc]] +==== {api-description-title} You can get a single filter or all filters. For more information, see {stack-ov}/ml-rules.html[Machine learning custom rules]. - -==== Path Parameters +[[ml-get-filter-path-parms]] +==== {api-path-parms-title} `filter_id`:: (string) Identifier for the filter. - -==== Querystring Parameters +[[ml-get-filter-query-parms]] +==== {api-query-parms-title} `from`::: (integer) Skips the specified number of filters. @@ -36,8 +36,8 @@ You can get a single filter or all filters. For more information, see `size`::: (integer) Specifies the maximum number of filters to obtain. - -==== Results +[[ml-get-filter-results]] +==== {api-response-body-title} The API returns the following information: @@ -45,15 +45,15 @@ The API returns the following information: (array) An array of filter resources. For more information, see <>. - -==== Authorization +[[ml-get-filter-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. 
For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-filter-example]] +==== {api-examples-title} The following example gets configuration information for the `safe_domains` filter: diff --git a/docs/reference/ml/apis/get-influencer.asciidoc b/docs/reference/ml/apis/get-influencer.asciidoc index 7425a734ed441..fedcac2079245 100644 --- a/docs/reference/ml/apis/get-influencer.asciidoc +++ b/docs/reference/ml/apis/get-influencer.asciidoc @@ -8,19 +8,19 @@ Retrieves job results for one or more influencers. - -==== Request +[[ml-get-influencer-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//results/influencers` -//===== Description - -==== Path Parameters +[[ml-get-influencer-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. -==== Request Body +[[ml-get-influencer-request-body]] +==== {api-request-body-title} `desc`:: (boolean) If true, the results are sorted in descending order. @@ -48,8 +48,8 @@ Retrieves job results for one or more influencers. `start`:: (string) Returns influencers with timestamps after this time. - -==== Results +[[ml-get-influencer-results]] +==== {api-response-body-title} The API returns the following information: @@ -57,19 +57,18 @@ The API returns the following information: (array) An array of influencer objects. For more information, see <>. - -==== Authorization +[[ml-get-influencer-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also need `read` index privilege on the index that stores the results. The `machine_learning_admin` and `machine_learning_user` roles provide these privileges. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges] and -{xpack-ref}/built-in-roles.html[Built-in Roles]. -//<> and <>. - +{stack-ov}/security-privileges.html[Security Privileges] and +{stack-ov}/built-in-roles.html[Built-in Roles]. -==== Examples +[[ml-get-influencer-example]] +==== {api-examples-title} The following example gets influencer information for the `it_ops_new_kpi` job: diff --git a/docs/reference/ml/apis/get-job-stats.asciidoc b/docs/reference/ml/apis/get-job-stats.asciidoc index f3a3207c1a0f3..4b32b11abf868 100644 --- a/docs/reference/ml/apis/get-job-stats.asciidoc +++ b/docs/reference/ml/apis/get-job-stats.asciidoc @@ -8,10 +8,8 @@ Retrieves usage information for jobs. - -==== Request - - +[[ml-get-job-stats-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//_stats` @@ -21,8 +19,8 @@ Retrieves usage information for jobs. `GET _ml/anomaly_detectors/_all/_stats` + - -===== Description +[[ml-get-job-stats-desc]] +==== {api-description-title} You can get statistics for multiple jobs in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. You can @@ -31,16 +29,16 @@ get statistics for all jobs by using `_all`, by specifying `*` as the IMPORTANT: This API returns a maximum of 10,000 jobs. - -==== Path Parameters +[[ml-get-job-stats-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) An identifier for the job. It can be a job identifier, a group name, or a wildcard expression. If you do not specify one of these options, the API returns statistics for all jobs. 
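For example, the `_all` form shown above retrieves statistics for every job:

[source,js]
----
GET _ml/anomaly_detectors/_all/_stats
----
// CONSOLE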
- -==== Results +[[ml-get-job-stats-results]] +==== {api-response-body-title} The API returns the following information: @@ -48,15 +46,15 @@ The API returns the following information: (array) An array of job statistics objects. For more information, see <>. - -==== Authorization +[[ml-get-job-stats-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-job-stats-example]] +==== {api-examples-title} The following example gets usage information for the `farequote` job: diff --git a/docs/reference/ml/apis/get-job.asciidoc b/docs/reference/ml/apis/get-job.asciidoc index 4eb7eaf5a7f07..a4bbb66b5d03f 100644 --- a/docs/reference/ml/apis/get-job.asciidoc +++ b/docs/reference/ml/apis/get-job.asciidoc @@ -8,8 +8,8 @@ Retrieves configuration information for jobs. - -==== Request +[[ml-get-job-request]] +==== {api-request-title} `GET _ml/anomaly_detectors/` + @@ -19,8 +19,8 @@ Retrieves configuration information for jobs. `GET _ml/anomaly_detectors/_all` - -===== Description +[[ml-get-job-desc]] +==== {api-description-title} You can get information for multiple jobs in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. You can @@ -29,15 +29,16 @@ get information for all jobs by using `_all`, by specifying `*` as the IMPORTANT: This API returns a maximum of 10,000 jobs. - -==== Path Parameters +[[ml-get-job-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. It can be a job identifier, a group name, or a wildcard expression. If you do not specify one of these options, the API returns information for all jobs. -==== Results +[[ml-get-job-results]] +==== {api-response-body-title} The API returns the following information: @@ -45,15 +46,15 @@ The API returns the following information: (array) An array of job resources. For more information, see <>. - -==== Authorization +[[ml-get-job-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-get-job-example]] +==== {api-examples-title} The following example gets configuration information for the `total-requests` job: diff --git a/docs/reference/ml/apis/get-ml-info.asciidoc b/docs/reference/ml/apis/get-ml-info.asciidoc index 41b680e1327c0..b60a36eed2985 100644 --- a/docs/reference/ml/apis/get-ml-info.asciidoc +++ b/docs/reference/ml/apis/get-ml-info.asciidoc @@ -10,28 +10,30 @@ Returns defaults and limits used by machine learning. -==== Request +[[get-ml-info-request]] +==== {api-request-title} `GET _ml/info` -==== Description +[[get-ml-info-desc]] +==== {api-description-title} This endpoint is designed to be used by a user interface that needs to fully understand machine learning configurations where some options are not specified, meaning that the defaults should be used. This endpoint may be used to find out what those defaults are. - -==== Authorization +[[get-ml-info-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. 
The `machine_learning_admin` and `machine_learning_user` roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. - +{stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. -==== Examples +[[get-ml-info-example]] +==== {api-examples-title} The endpoint takes no arguments: diff --git a/docs/reference/ml/apis/get-overall-buckets.asciidoc b/docs/reference/ml/apis/get-overall-buckets.asciidoc index d8592e6516bbb..81c5c371ac4a1 100644 --- a/docs/reference/ml/apis/get-overall-buckets.asciidoc +++ b/docs/reference/ml/apis/get-overall-buckets.asciidoc @@ -9,7 +9,8 @@ Retrieves overall bucket results that summarize the bucket results of multiple jobs. -==== Request +[[ml-get-overall-buckets-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//results/overall_buckets` + @@ -17,7 +18,8 @@ bucket results of multiple jobs. `GET _ml/anomaly_detectors/_all/results/overall_buckets` -==== Description +[[ml-get-overall-buckets-desc]] +==== {api-description-title} You can summarize the bucket results for all jobs by using `_all` or by specifying `*` as the ``. @@ -41,13 +43,15 @@ to request overall buckets that span longer than the largest job's `bucket_span` When set, the `overall_score` will be the max `overall_score` of the corresponding overall buckets with a span equal to the largest job's `bucket_span`. -==== Path Parameters +[[ml-get-overall-buckets-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. It can be a job identifier, a group name, a comma-separated list of jobs or groups, or a wildcard expression. -==== Request Body +[[ml-get-overall-buckets-request-body]] +==== {api-request-body-title} `allow_no_jobs`:: (boolean) If `false` and the `job_id` does not match any job an error will @@ -76,8 +80,8 @@ overall buckets with a span equal to the largest job's `bucket_span`. (integer) The number of top job bucket scores to be used in the `overall_score` calculation. The default value is `1`. - -===== Results +[[ml-get-overall-buckets-results]] +==== {api-response-body-title} The API returns the following information: @@ -85,18 +89,18 @@ The API returns the following information: (array) An array of overall bucket objects. For more information, see <>. - -==== Authorization +[[ml-get-overall-buckets-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also need `read` index privilege on the index that stores the results. The `machine_learning_admin` and `machine_learning_user` roles provide these privileges. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges] and -{xpack-ref}/built-in-roles.html[Built-in Roles]. - +{stack-ov}/security-privileges.html[Security Privileges] and +{stack-ov}/built-in-roles.html[Built-in Roles]. -==== Examples +[[ml-get-overall-buckets-example]] +==== {api-examples-title} The following example gets overall buckets for jobs with IDs matching `job-*`: diff --git a/docs/reference/ml/apis/get-record.asciidoc b/docs/reference/ml/apis/get-record.asciidoc index afc7d2733c872..fec36aa4a5651 100644 --- a/docs/reference/ml/apis/get-record.asciidoc +++ b/docs/reference/ml/apis/get-record.asciidoc @@ -8,20 +8,19 @@ Retrieves anomaly records for a job. 
- -==== Request +[[ml-get-record-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//results/records` -//===== Description - -==== Path Parameters +[[ml-get-record-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. - -==== Request Body +[[ml-get-record-request-body]] +==== {api-request-body-title} `desc`:: (boolean) If true, the results are sorted in descending order. @@ -49,8 +48,8 @@ Retrieves anomaly records for a job. `start`:: (string) Returns records with timestamps after this time. - -==== Results +[[ml-get-record-results]] +==== {api-response-body-title} The API returns the following information: @@ -58,19 +57,18 @@ The API returns the following information: (array) An array of record objects. For more information, see <>. - -==== Authorization +[[ml-get-record-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also need `read` index privilege on the index that stores the results. The `machine_learning_admin` and `machine_learning_user` roles provide these privileges. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges] and -{xpack-ref}/built-in-roles.html[Built-in Roles]. -//<> and <>. - +{stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. -==== Examples +[[ml-get-record-example]] +==== {api-examples-title} The following example gets record information for the `it-ops-kpi` job: diff --git a/docs/reference/ml/apis/get-snapshot.asciidoc b/docs/reference/ml/apis/get-snapshot.asciidoc index 4935a6e2d238f..eb5bc4354f27f 100644 --- a/docs/reference/ml/apis/get-snapshot.asciidoc +++ b/docs/reference/ml/apis/get-snapshot.asciidoc @@ -8,16 +8,15 @@ Retrieves information about model snapshots. - -==== Request +[[ml-get-snapshot-request]] +==== {api-request-title} `GET _ml/anomaly_detectors//model_snapshots` + `GET _ml/anomaly_detectors//model_snapshots/` -//===== Description - -==== Path Parameters +[[ml-get-snapshot-path-parms]] +==== {api-path-parms-title} `job_id`:: (string) Identifier for the job. @@ -26,7 +25,8 @@ Retrieves information about model snapshots. (string) Identifier for the model snapshot. If you do not specify this optional parameter, the API returns information about all model snapshots. -==== Request Body +[[ml-get-snapshot-request-body]] +==== {api-request-body-title} `desc`:: (boolean) If true, the results are sorted in descending order. @@ -47,8 +47,8 @@ Retrieves information about model snapshots. `start`:: (string) Returns snapshots with timestamps after this time. - -==== Results +[[ml-get-snapshot-results]] +==== {api-response-body-title} The API returns the following information: @@ -56,16 +56,15 @@ The API returns the following information: (array) An array of model snapshot objects. For more information, see <>. - -==== Authorization +[[ml-get-snapshot-prereqs]] +==== {api-prereq-title} You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. -//<>. - +{stack-ov}/security-privileges.html[Security privileges]. 
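A request combining the `desc` and `start` parameters described above might look like the following sketch (the job name and timestamp are illustrative):

[source,js]
----
GET _ml/anomaly_detectors/it_ops_new_logs/model_snapshots
{
  "desc": true,
  "start": "1491852977000"
}
----
// CONSOLE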
-==== Examples
+[[ml-get-snapshot-example]]
+==== {api-examples-title}

The following example gets model snapshot information for the
`it_ops_new_logs` job:

diff --git a/docs/reference/ml/apis/open-job.asciidoc b/docs/reference/ml/apis/open-job.asciidoc
index 08c7b97d9c050..4966ab9fc654e 100644
--- a/docs/reference/ml/apis/open-job.asciidoc
+++ b/docs/reference/ml/apis/open-job.asciidoc
@@ -10,41 +10,41 @@ Opens one or more jobs.
A job must be opened in order for it to be ready to receive and analyze data.
A job can be opened and closed multiple times throughout its lifecycle.

-
-==== Request
+[[ml-open-job-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors/{job_id}/_open`

-
-==== Description
+[[ml-open-job-desc]]
+==== {api-description-title}

When you open a new job, it starts with an empty model.

When you open an existing job, the most recent model state is automatically
loaded. The job is ready to resume its analysis from where it left off, once
new data is received.

-
-==== Path Parameters
+[[ml-open-job-path-parms]]
+==== {api-path-parms-title}

`job_id` (required)::
(string) Identifier for the job

-
-==== Request Body
+[[ml-open-job-request-body]]
+==== {api-request-body-title}

`timeout`::
  (time) Controls the time to wait until a job has opened.
  The default value is 30 minutes.

-
-==== Authorization
+[[ml-open-job-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-open-job-example]]
+==== {api-examples-title}

The following example opens the `total-requests` job and sets an optional
property:

diff --git a/docs/reference/ml/apis/post-calendar-event.asciidoc b/docs/reference/ml/apis/post-calendar-event.asciidoc
index 5d122a5d6d1a8..1a3614045ea4b 100644
--- a/docs/reference/ml/apis/post-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/post-calendar-event.asciidoc
@@ -8,38 +8,40 @@

Posts scheduled events in a calendar.

-==== Request
+[[ml-post-calendar-event-request]]
+==== {api-request-title}

`POST _ml/calendars//events`

+[[ml-post-calendar-event-desc]]
+==== {api-description-title}

-==== Description
-
-This API accepts a list of {xpack-ref}/ml-calendars.html[scheduled events], each
+This API accepts a list of {stack-ov}/ml-calendars.html[scheduled events], each
of which must have a start time, end time, and description.

-==== Path Parameters
+[[ml-post-calendar-event-path-parms]]
+==== {api-path-parms-title}

`calendar_id` (required)::
  (string) Identifier for the calendar.

-
-==== Request Body
+[[ml-post-calendar-event-request-body]]
+==== {api-request-body-title}

`events`::
  (array) A list of one or more scheduled events. The event's start and end
  times may be specified as integer milliseconds since the epoch or as a
  string in ISO 8601 format. See <>.

-
-==== Authorization
+[[ml-post-calendar-event-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].
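A sketch of such a request with a single event, using epoch-millisecond times (the calendar name and event values are illustrative; the field names follow the scheduled event resource):

[source,js]
----
POST _ml/calendars/planned-outages/events
{
  "events": [
    {
      "description": "event 1",
      "start_time": 1513641600000,
      "end_time": 1513728000000
    }
  ]
}
----
// CONSOLE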
-==== Examples +[[ml-post-calendar-event-example]] +==== {api-examples-title} You can add scheduled events to the `planned-outages` calendar as follows: diff --git a/docs/reference/ml/apis/post-data.asciidoc b/docs/reference/ml/apis/post-data.asciidoc index 2df0df69e9030..39fb048d8b448 100644 --- a/docs/reference/ml/apis/post-data.asciidoc +++ b/docs/reference/ml/apis/post-data.asciidoc @@ -8,13 +8,13 @@ Sends data to an anomaly detection job for analysis. - -==== Request +[[ml-post-data-request]] +==== {api-request-title} `POST _ml/anomaly_detectors//_data` - -==== Description +[[ml-post-data-desc]] +==== {api-description-title} The job must have a state of `open` to receive and process the data. @@ -42,14 +42,14 @@ IMPORTANT: For each job, data can only be accepted from a single connection at a time. It is not currently possible to post data to multiple jobs using wildcards or a comma-separated list. - -==== Path Parameters +[[ml-post-data-path-parms]] +==== {api-path-parms-title} `job_id` (required):: (string) Identifier for the job - -==== Query Parameters +[[ml-post-data-query-parms]] +==== {api-query-parms-title} `reset_start`:: (string) Specifies the start of the bucket resetting range @@ -57,22 +57,21 @@ or a comma-separated list. `reset_end`:: (string) Specifies the end of the bucket resetting range - -==== Request Body +[[ml-post-data-request-body]] +==== {api-request-body-title} A sequence of one or more JSON documents containing the data to be analyzed. Only whitespace characters are permitted in between the documents. - -==== Authorization +[[ml-post-data-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. -//<>. - +{stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-post-data-example]] +==== {api-examples-title} The following example posts data from the it_ops_new_kpi.json file to the `it_ops_new_kpi` job: diff --git a/docs/reference/ml/apis/preview-datafeed.asciidoc b/docs/reference/ml/apis/preview-datafeed.asciidoc index 83af6a78057cc..cfffe96b3de37 100644 --- a/docs/reference/ml/apis/preview-datafeed.asciidoc +++ b/docs/reference/ml/apis/preview-datafeed.asciidoc @@ -10,33 +10,33 @@ Previews a {dfeed}. - -==== Request +[[ml-preview-datafeed-request]] +==== {api-request-title} `GET _ml/datafeeds//_preview` - -==== Description +[[ml-preview-datafeed-desc]] +==== {api-description-title} The preview {dfeeds} API returns the first "page" of results from the `search` that is created by using the current {dfeed} settings. This preview shows the structure of the data that will be passed to the anomaly detection engine. - -==== Path Parameters +[[ml-preview-datafeed-path-parms]] +==== {api-path-parms-title} `datafeed_id` (required):: (string) Identifier for the {dfeed} - -==== Authorization +[[ml-preview-datafeed-prereqs]] +==== {api-prereq-title} If {es} {security-features} are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security privileges]. +[[ml-preview-datafeed-security]] ==== Security Integration When {es} {security-features} are enabled, the {dfeed} query is previewed using @@ -47,8 +47,8 @@ not accurately reflect what the {dfeed} will return when started. 
To avoid such problems, the same user that creates/updates the {dfeed} should preview it to ensure it is returning the expected data. - -==== Examples +[[ml-preview-datafeed-example]] +==== {api-examples-title} The following example obtains a preview of the `datafeed-farequote` {dfeed}: diff --git a/docs/reference/ml/apis/put-calendar-job.asciidoc b/docs/reference/ml/apis/put-calendar-job.asciidoc index cafc5f670627c..abf124c8a1114 100644 --- a/docs/reference/ml/apis/put-calendar-job.asciidoc +++ b/docs/reference/ml/apis/put-calendar-job.asciidoc @@ -8,12 +8,13 @@ Adds a job to a calendar. -==== Request +[[ml-put-calendar-job-request]] +==== {api-request-title} `PUT _ml/calendars//jobs/` - -==== Path Parameters +[[ml-put-calendar-job-path-parms]] +==== {api-path-parms-title} `calendar_id` (required):: (string) Identifier for the calendar. @@ -22,14 +23,15 @@ Adds a job to a calendar. (string) An identifier for the job. It can be a job identifier, a group name, or a comma-separated list of jobs or groups. -==== Authorization +[[ml-put-calendar-job-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security Privileges]. -==== Examples +[[ml-put-calendar-job-example]] +==== {api-examples-title} The following example associates the `planned-outages` calendar with the `total-requests` job: diff --git a/docs/reference/ml/apis/put-calendar.asciidoc b/docs/reference/ml/apis/put-calendar.asciidoc index 9b1e781e3cc06..b7ea586a106a6 100644 --- a/docs/reference/ml/apis/put-calendar.asciidoc +++ b/docs/reference/ml/apis/put-calendar.asciidoc @@ -8,35 +8,38 @@ Instantiates a calendar. -==== Request +[[ml-put-calendar-request]] +==== {api-request-title} `PUT _ml/calendars/` -===== Description +[[ml-put-calendar-desc]] +==== {api-description-title} For more information, see -{xpack-ref}/ml-calendars.html[Calendars and Scheduled Events]. +{stack-ov}/ml-calendars.html[Calendars and Scheduled Events]. -==== Path Parameters +[[ml-put-calendar-path-parms]] +==== {api-path-parms-title} `calendar_id` (required):: (string) Identifier for the calendar. - -==== Request Body +[[ml-put-calendar-request-body]] +==== {api-request-body-title} `description`:: (string) A description of the calendar. - -==== Authorization +[[ml-put-calendar-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{xpack-ref}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security privileges]. -==== Examples +[[ml-put-calendar-example]] +==== {api-examples-title} The following example creates the `planned-outages` calendar: diff --git a/docs/reference/ml/apis/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc index 2e0f6700191cd..428af146b4da4 100644 --- a/docs/reference/ml/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/apis/put-datafeed.asciidoc @@ -10,13 +10,13 @@ Instantiates a {dfeed}. - -==== Request +[[ml-put-datafeed-request]] +==== {api-request-title} `PUT _ml/datafeeds/` - -==== Description +[[ml-put-datafeed-desc]] +==== {api-description-title} You must create a job before you create a {dfeed}. You can associate only one {dfeed} to each job. @@ -26,16 +26,16 @@ IMPORTANT: You must use {kib} or this API to create a {dfeed}. 
Do not put a {df If {es} {security-features} are enabled, do not give users `write` privileges on the `.ml-config` index. - -==== Path Parameters +[[ml-put-datafeed-path-parms]] +==== {api-path-parms-title} `feed_id` (required):: (string) A numerical character string that uniquely identifies the {dfeed}. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. - -==== Request Body +[[ml-put-datafeed-request-body]] +==== {api-request-body-title} `aggregations`:: (object) If set, the {dfeed} performs aggregation searches. @@ -90,22 +90,22 @@ IMPORTANT: You must use {kib} or this API to create a {dfeed}. Do not put a {df For more information about these properties, see <>. - -==== Authorization +[[ml-put-datafeed-prereqs]] +==== {api-prereq-title} If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security privileges]. +[[ml-put-datafeed-security]] ==== Security integration When {es} {security-features} are enabled, your {dfeed} remembers which roles the user who created it had at the time of creation and runs the query using those same roles. - -==== Examples +[[ml-put-datafeed-example]] +==== {api-examples-title} The following example creates the `datafeed-total-requests` {dfeed}: diff --git a/docs/reference/ml/apis/put-filter.asciidoc b/docs/reference/ml/apis/put-filter.asciidoc index abe52dfb13b25..61ed24f4d5b9b 100644 --- a/docs/reference/ml/apis/put-filter.asciidoc +++ b/docs/reference/ml/apis/put-filter.asciidoc @@ -8,23 +8,26 @@ Instantiates a filter. -==== Request +[[ml-put-filter-request]] +==== {api-request-title} `PUT _ml/filters/` -===== Description +[[ml-put-filter-desc]] +==== {api-description-title} A {stack-ov}/ml-rules.html[filter] contains a list of strings. It can be used by one or more jobs. Specifically, filters are referenced in the `custom_rules` property of <>. -==== Path Parameters +[[ml-put-filter-path-parms]] +==== {api-path-parms-title} `filter_id` (required):: (string) Identifier for the filter. - -==== Request Body +[[ml-put-filter-request-body]] +==== {api-request-body-title} `description`:: (string) A description of the filter. @@ -35,15 +38,15 @@ the `custom_rules` property of <` -===== Description +[[ml-put-job-desc]] +==== {api-description-title} IMPORTANT: You must use {kib} or this API to create a {ml} job. Do not put a job directly to the `.ml-config` index using the Elasticsearch index API. If {es} {security-features} are enabled, do not give users `write` privileges on the `.ml-config` index. - -==== Path Parameters +[[ml-put-job-path-parms]] +==== {api-path-parms-title} `job_id` (required):: (string) Identifier for the job. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. - -==== Request Body +[[ml-put-job-request-body]] +==== {api-request-body-title} `analysis_config`:: (object) The analysis configuration, which specifies how to analyze the data. @@ -78,14 +80,15 @@ IMPORTANT: You must use {kib} or this API to create a {ml} job. Do not put a job (long) Advanced configuration option. The number of days for which job results are retained. See <>. 
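Putting the properties above together, a minimal create-job request might look like this sketch (the job name, detector, and field names are illustrative and assume a standard `data_description`):

[source,js]
----
PUT _ml/anomaly_detectors/total-requests
{
  "description": "Total sum of requests",
  "analysis_config": {
    "bucket_span": "10m",
    "detectors": [
      {
        "detector_description": "Sum of total",
        "function": "sum",
        "field_name": "total"
      }
    ]
  },
  "data_description": {
    "time_field": "timestamp",
    "time_format": "epoch_ms"
  }
}
----
// CONSOLE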
-==== Authorization
+[[ml-put-job-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-put-job-example]]
+==== {api-examples-title}

The following example creates the `total-requests` job:

diff --git a/docs/reference/ml/apis/revert-snapshot.asciidoc b/docs/reference/ml/apis/revert-snapshot.asciidoc
index b560f7b041206..f470b4ec60ffb 100644
--- a/docs/reference/ml/apis/revert-snapshot.asciidoc
+++ b/docs/reference/ml/apis/revert-snapshot.asciidoc
@@ -8,12 +8,13 @@

Reverts to a specific snapshot.

-==== Request
+[[ml-revert-snapshot-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors//model_snapshots//_revert`

-
-==== Description
+[[ml-revert-snapshot-desc]]
+==== {api-description-title}

The {ml} feature in {xpack} reacts quickly to anomalous input, learning new
behaviors in data. Highly anomalous input increases the variance in the models
@@ -25,8 +26,8 @@ Friday or a critical system failure.

IMPORTANT: Before you revert to a saved snapshot, you must close the job.

-
-==== Path Parameters
+[[ml-revert-snapshot-path-parms]]
+==== {api-path-parms-title}

`job_id` (required)::
  (string) Identifier for the job

@@ -34,7 +35,8 @@ IMPORTANT: Before you revert to a saved snapshot, you must close the job.
`snapshot_id` (required)::
  (string) Identifier for the model snapshot

-==== Request Body
+[[ml-revert-snapshot-request-body]]
+==== {api-request-body-title}

`delete_intervening_results`::
  (boolean) If true, deletes the results in the time period between the
@@ -45,15 +47,15 @@ NOTE: If you choose not to delete intervening results when reverting a snapshot,
the job will not accept input data that is older than the current time.
If you want to resend data, then delete the intervening results.

-
-==== Authorization
+[[ml-revert-snapshot-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-revert-snapshot-example]]
+==== {api-examples-title}

The following example reverts to the `1491856080` snapshot for the
`it_ops_new_kpi` job:

diff --git a/docs/reference/ml/apis/set-upgrade-mode.asciidoc b/docs/reference/ml/apis/set-upgrade-mode.asciidoc
index 5434d70d4e61e..16ddbe19e5995 100644
--- a/docs/reference/ml/apis/set-upgrade-mode.asciidoc
+++ b/docs/reference/ml/apis/set-upgrade-mode.asciidoc
@@ -9,7 +9,8 @@ Sets a cluster-wide `upgrade_mode` setting that prepares {ml} indices for an
upgrade.

-==== Request
+[[ml-set-upgrade-mode-request]]
+==== {api-request-title}

//////////////////////////
[source,js]
--------------------------------------------------
POST /_ml/set_upgrade_mode?enabled=false&timeout=10m
@@ -25,7 +26,8 @@ POST /_ml/set_upgrade_mode?enabled=false&timeout=10m

`POST _ml/set_upgrade_mode`

-==== Description
+[[ml-set-upgrade-mode-desc]]
+==== {api-description-title}

When upgrading your cluster, in some circumstances you must restart your nodes and
reindex your {ml} indices. In those circumstances, there must be no {ml} jobs running.
@@ -37,7 +39,6 @@ though stopping jobs is not a requirement in that case.
For more information, see {stack-ref}/upgrading-elastic-stack.html[Upgrading the {stack}].

-
When `enabled=true`, this API temporarily halts all job and {dfeed} tasks and
prohibits new job and {dfeed} tasks from starting.
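For instance, mirroring the request form shown above, you might enable upgrade mode with an explicit timeout:

[source,js]
----
POST _ml/set_upgrade_mode?enabled=true&timeout=10m
----
// CONSOLE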
@@ -50,7 +51,8 @@ You can see the current value for the `upgrade_mode` setting by using the IMPORTANT: No new {ml} jobs can be opened while the `upgrade_mode` setting is `true`. -==== Query Parameters +[[ml-set-upgrade-mode-query-parms]] +==== {api-query-parms-title} `enabled`:: (boolean) When `true`, this enables `upgrade_mode`. Defaults to `false` @@ -59,14 +61,15 @@ IMPORTANT: No new {ml} jobs can be opened while the `upgrade_mode` setting is (time) The time to wait for the request to be completed. The default value is 30 seconds. -==== Authorization +[[ml-set-upgrade-mode-prereqs]] +==== {api-prereq-title} You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {stack-ov}/security-privileges.html[Security privileges]. - -==== Examples +[[ml-set-upgrade-mode-example]] +==== {api-examples-title} The following example enables `upgrade_mode` for the cluster: diff --git a/docs/reference/ml/apis/start-datafeed.asciidoc b/docs/reference/ml/apis/start-datafeed.asciidoc index aee237b72c837..35c632d5c41c6 100644 --- a/docs/reference/ml/apis/start-datafeed.asciidoc +++ b/docs/reference/ml/apis/start-datafeed.asciidoc @@ -12,11 +12,13 @@ Starts one or more {dfeeds}. A {dfeed} must be started in order to retrieve data from {es}. A {dfeed} can be started and stopped multiple times throughout its lifecycle. -==== Request +[[ml-start-datafeed-request]] +==== {api-request-title} `POST _ml/datafeeds//_start` -==== Description +[[ml-start-datafeed-desc]] +==== {api-description-title} NOTE: Before you can start a {dfeed}, the job must be open. Otherwise, an error occurs. @@ -56,13 +58,14 @@ If you specify a `start` value that is earlier than the timestamp of the latest processed record, the {dfeed} continues from 1 millisecond after the timestamp of the latest processed record. - -==== Path Parameters +[[ml-start-datafeed-path-parms]] +==== {api-path-parms-title} `feed_id` (required):: (string) Identifier for the {dfeed} -==== Request Body +[[ml-start-datafeed-request-body]] +==== {api-request-body-title} `end`:: (string) The time that the {dfeed} should end. This value is exclusive. @@ -76,22 +79,22 @@ of the latest processed record. (time) Controls the amount of time to wait until a {dfeed} starts. The default value is 20 seconds. - -==== Authorization +[[ml-start-datafeed-prereqs]] +==== {api-prereq-title} If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - +{stack-ov}/security-privileges.html[Security privileges]. +[[ml-start-datafeed-security]] ==== Security integration When {es} {security-features} are enabled, your {dfeed} remembers which roles the last user to create or update it had at the time of creation/update and runs the query using those same roles. - -==== Examples +[[ml-start-datafeed-example]] +==== {api-examples-title} The following example starts the `datafeed-it-ops-kpi` {dfeed}: diff --git a/docs/reference/ml/apis/stop-datafeed.asciidoc b/docs/reference/ml/apis/stop-datafeed.asciidoc index 1489137b9db07..497975f425c88 100644 --- a/docs/reference/ml/apis/stop-datafeed.asciidoc +++ b/docs/reference/ml/apis/stop-datafeed.asciidoc @@ -13,7 +13,8 @@ Stops one or more {dfeeds}. A {dfeed} that is stopped ceases to retrieve data from {es}. A {dfeed} can be started and stopped multiple times throughout its lifecycle. 
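A sketch of a stop request with an explicit `timeout` (described below; the {dfeed} name is illustrative):

[source,js]
----
POST _ml/datafeeds/datafeed-total-requests/_stop
{
  "timeout": "30s"
}
----
// CONSOLE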
-==== Request
+[[ml-stop-datafeed-request]]
+==== {api-request-title}

`POST _ml/datafeeds//_stop` +
@@ -21,22 +22,22 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.

`POST _ml/datafeeds/_all/_stop`

-
-===== Description
+[[ml-stop-datafeed-desc]]
+==== {api-description-title}

You can stop multiple {dfeeds} in a single API request by using a
comma-separated list of {dfeeds} or a wildcard expression. You can stop all
{dfeeds} by using `_all` or by specifying `*` as the ``.

-
-==== Path Parameters
+[[ml-stop-datafeed-path-parms]]
+==== {api-path-parms-title}

`feed_id`::
  (string) Identifier for the {dfeed}. It can be a {dfeed} identifier or a
  wildcard expression.

-
-==== Request Body
+[[ml-stop-datafeed-request-body]]
+==== {api-request-body-title}

`force`::
  (boolean) If true, the {dfeed} is stopped forcefully.
@@ -45,15 +46,15 @@ comma-separated list of {dfeeds} or a wildcard expression. You can stop all
  (time) Controls the amount of time to wait until a {dfeed} stops.
  The default value is 20 seconds.

-
-==== Authorization
+[[ml-stop-datafeed-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-stop-datafeed-example]]
+==== {api-examples-title}

The following example stops the `datafeed-total-requests` {dfeed}:

diff --git a/docs/reference/ml/apis/update-datafeed.asciidoc b/docs/reference/ml/apis/update-datafeed.asciidoc
index 63878913c7f1a..9c3e56e66a642 100644
--- a/docs/reference/ml/apis/update-datafeed.asciidoc
+++ b/docs/reference/ml/apis/update-datafeed.asciidoc
@@ -10,21 +10,25 @@

Updates certain properties of a {dfeed}.

-==== Request
+[[ml-update-datafeed-request]]
+==== {api-request-title}

`POST _ml/datafeeds//_update`

-===== Description
+[[ml-update-datafeed-desc]]
+==== {api-description-title}

NOTE: If you update the `delayed_data_check_config` property, you must stop and
start the {dfeed} for the change to be applied.

-==== Path Parameters
+[[ml-update-datafeed-path-parms]]
+==== {api-path-parms-title}

`feed_id` (required)::
  (string) Identifier for the {dfeed}

-==== Request Body
+[[ml-update-datafeed-request-body]]
+==== {api-request-body-title}

The following properties can be updated after the {dfeed} is created:

@@ -80,22 +84,22 @@ The following properties can be updated after the {dfeed} is created:
For more information about these properties,
see <>.

-
-==== Authorization
+[[ml-update-datafeed-prereqs]]
+==== {api-prereq-title}

If {es} {security-features} are enabled, you must have `manage_ml`, or `manage`
cluster privileges to use this API. For more information, see
-{stack-ov}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

+[[ml-update-datafeed-security]]
==== Security Integration

When {es} {security-features} are enabled, your {dfeed} remembers which roles the
user who updated it had at the time of update and runs the query using those
same roles.
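As a sketch of such an update, narrowing the {dfeed} query to error-level log entries might look like this (the `level` field is illustrative):

[source,js]
----
POST _ml/datafeeds/datafeed-total-requests/_update
{
  "query": {
    "term": {
      "level": "error"
    }
  }
}
----
// CONSOLE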
-
-==== Examples
+[[ml-update-datafeed-example]]
+==== {api-examples-title}

The following example updates the query for the `datafeed-total-requests`
{dfeed} so that only log entries of error level are analyzed:

diff --git a/docs/reference/ml/apis/update-filter.asciidoc b/docs/reference/ml/apis/update-filter.asciidoc
index 45c294a0b8bc6..842808ebe558a 100644
--- a/docs/reference/ml/apis/update-filter.asciidoc
+++ b/docs/reference/ml/apis/update-filter.asciidoc
@@ -8,18 +8,18 @@

Updates the description of a filter, adds items, or removes items.

-==== Request
+[[ml-update-filter-request]]
+==== {api-request-title}

`POST _ml/filters//_update`

-//==== Description
-
-==== Path Parameters
+[[ml-update-filter-path-parms]]
+==== {api-path-parms-title}

`filter_id` (required)::
  (string) Identifier for the filter.

-
+[[ml-update-filter-request-body]]
==== Request Body

`description`::
  (string) A description of the filter.
@@ -31,15 +31,15 @@ Updates the description of a filter, adds items, or removes items.

`remove_items`::
  (array of strings) The items to remove from the filter.

-
-==== Authorization
+[[ml-update-filter-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-update-filter-example]]
+==== {api-examples-title}

You can change the description, add items to, and remove items from the
`safe_domains` filter as follows:

diff --git a/docs/reference/ml/apis/update-job.asciidoc b/docs/reference/ml/apis/update-job.asciidoc
index 3382e7fe34675..39c510bda1efa 100644
--- a/docs/reference/ml/apis/update-job.asciidoc
+++ b/docs/reference/ml/apis/update-job.asciidoc
@@ -8,17 +8,19 @@

Updates certain properties of a job.

-==== Request
+[[ml-update-job-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors//_update`

-
-==== Path Parameters
+[[ml-update-job-path-parms]]
+==== {api-path-parms-title}

`job_id` (required)::
  (string) Identifier for the job

-==== Request Body
+[[ml-update-job-request-body]]
+==== {api-request-body-title}

The following properties can be updated after the job is created:

@@ -86,14 +88,15 @@ A detector update object has the following properties:

No other detector property can be updated.

-==== Authorization
+[[ml-update-job-prereqs]]
+==== {api-prereq-title}

You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].

-==== Examples
+[[ml-update-job-example]]
+==== {api-examples-title}

The following example updates the `total-requests` job:

diff --git a/docs/reference/ml/apis/update-snapshot.asciidoc b/docs/reference/ml/apis/update-snapshot.asciidoc
index ffd38f590b1e2..edf9e05d867e7 100644
--- a/docs/reference/ml/apis/update-snapshot.asciidoc
+++ b/docs/reference/ml/apis/update-snapshot.asciidoc
@@ -8,14 +8,13 @@

Updates certain properties of a snapshot.

-==== Request
+[[ml-update-snapshot-request]]
+==== {api-request-title}

`POST _ml/anomaly_detectors//model_snapshots//_update`

-
-//==== Description
-
-==== Path Parameters
+[[ml-update-snapshot-path-parms]]
+==== {api-path-parms-title}

`job_id` (required)::
  (string) Identifier for the job
 `snapshot_id` (required)::
   (string) Identifier for the model snapshot
 
-==== Request Body
+[[ml-update-snapshot-request-body]]
+==== {api-request-body-title}
 
 The following properties can be updated after the model snapshot is created:
 
@@ -37,16 +37,15 @@ The following properties can be updated after the model snapshot is created:
       Note that this snapshot will still be deleted when the job is deleted.
       The default value is false.
 
-
-==== Authorization
+[[ml-update-snapshot-prereqs]]
+==== {api-prereq-title}
 
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
-
+{stack-ov}/security-privileges.html[Security privileges].
 
-==== Examples
+[[ml-update-snapshot-example]]
+==== {api-examples-title}
 
 The following example updates the snapshot identified as `1491852978`:
 
diff --git a/docs/reference/ml/apis/validate-detector.asciidoc b/docs/reference/ml/apis/validate-detector.asciidoc
index 0f9fe9902e36e..a3b7ca66072c1 100644
--- a/docs/reference/ml/apis/validate-detector.asciidoc
+++ b/docs/reference/ml/apis/validate-detector.asciidoc
@@ -8,30 +8,32 @@
 
 Validates detector configuration information.
 
-==== Request
+[[ml-valid-detector-request]]
+==== {api-request-title}
 
 `POST _ml/anomaly_detectors/_validate/detector`
 
-==== Description
+[[ml-valid-detector-desc]]
+==== {api-description-title}
 
 The validate detectors API enables you to validate the detector configuration
 before you create a job.
 
-
-==== Request Body
+[[ml-valid-detector-request-body]]
+==== {api-request-body-title}
 
 For a list of the properties that you can specify in the body of this API,
 see <>.
 
-
-==== Authorization
+[[ml-valid-detector-prereqs]]
+==== {api-prereq-title}
 
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].
 
-==== Examples
+[[ml-valid-detector-example]]
+==== {api-examples-title}
 
 The following example validates detector configuration information:
 
diff --git a/docs/reference/ml/apis/validate-job.asciidoc b/docs/reference/ml/apis/validate-job.asciidoc
index 5fbfb62dd28a6..651e45715699d 100644
--- a/docs/reference/ml/apis/validate-job.asciidoc
+++ b/docs/reference/ml/apis/validate-job.asciidoc
@@ -8,30 +8,32 @@
 
 Validates job configuration information.
 
-==== Request
+[[ml-valid-job-request]]
+==== {api-request-title}
 
 `POST _ml/anomaly_detectors/_validate`
 
-==== Description
+[[ml-valid-job-desc]]
+==== {api-description-title}
 
 The validate jobs API enables you to validate the job configuration
 before you create the job.
 
-
-==== Request Body
+[[ml-valid-job-request-body]]
+==== {api-request-body-title}
 
 For a list of the properties that you can specify in the body of this API,
 see <>.
 
-
-==== Authorization
+[[ml-valid-job-prereqs]]
+==== {api-prereq-title}
 
 You must have `manage_ml`, or `manage` cluster privileges to use this API.
 For more information, see
-{xpack-ref}/security-privileges.html[Security Privileges].
-
+{stack-ov}/security-privileges.html[Security privileges].
-==== Examples +[[ml-valid-job-example]] +==== {api-examples-title} The following example validates job configuration information: From 46d5d68bbbb8dd68720aec2ac186061ab63d31dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Thu, 27 Jun 2019 18:52:46 +0200 Subject: [PATCH 044/140] Add version and create_time to data frame analytics config (#43683) --- .../ml/PutDataFrameAnalyticsRequest.java | 6 ++ .../dataframe/DataFrameAnalyticsConfig.java | 69 ++++++++++++-- .../DataFrameAnalyticsConfigTests.java | 8 ++ .../dataframe/DataFrameAnalyticsConfig.java | 89 +++++++++++++++++-- .../persistence/ElasticsearchMappings.java | 8 ++ .../ml/job/results/ReservedFieldNames.java | 2 + .../DataFrameAnalyticsConfigTests.java | 71 +++++++++++++-- .../ml/qa/ml-with-security/build.gradle | 2 + .../TransportPutDataFrameAnalyticsAction.java | 8 +- .../test/data_frame/transforms_crud.yml | 38 ++++++++ .../test/ml/data_frame_analytics_crud.yml | 57 ++++++++++++ 11 files changed, 335 insertions(+), 23 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java index 14950a74c9187..2624b68a98318 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -67,4 +68,9 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(config); } + + @Override + public String toString() { + return Strings.toString(this); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java index b1309e66afcd4..62adb06294558 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java @@ -19,11 +19,14 @@ package org.elasticsearch.client.ml.dataframe; +import org.elasticsearch.Version; +import org.elasticsearch.client.dataframe.transforms.util.TimeUtil; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -31,11 +34,9 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; +import java.time.Instant; import java.util.Objects; -import static org.elasticsearch.common.xcontent.ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING; -import static org.elasticsearch.common.xcontent.ObjectParser.ValueType.VALUE; - public class DataFrameAnalyticsConfig 
implements ToXContentObject { public static DataFrameAnalyticsConfig fromXContent(XContentParser parser) { @@ -52,6 +53,8 @@ public static Builder builder(String id) { private static final ParseField ANALYSIS = new ParseField("analysis"); private static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields"); private static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); + private static final ParseField CREATE_TIME = new ParseField("create_time"); + private static final ParseField VERSION = new ParseField("version"); private static ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config", true, Builder::new); @@ -63,9 +66,24 @@ public static Builder builder(String id) { PARSER.declareField(Builder::setAnalyzedFields, (p, c) -> FetchSourceContext.fromXContent(p), ANALYZED_FIELDS, - OBJECT_ARRAY_BOOLEAN_OR_STRING); + ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); PARSER.declareField(Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), MODEL_MEMORY_LIMIT, VALUE); + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), + MODEL_MEMORY_LIMIT, + ValueType.VALUE); + PARSER.declareField(Builder::setCreateTime, + p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), + CREATE_TIME, + ValueType.VALUE); + PARSER.declareField(Builder::setVersion, + p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Version.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, + VERSION, + ValueType.STRING); } private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOException { @@ -82,15 +100,20 @@ private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOE private final DataFrameAnalysis analysis; private final FetchSourceContext analyzedFields; private final ByteSizeValue modelMemoryLimit; + private final Instant createTime; + private final Version version; private DataFrameAnalyticsConfig(String id, DataFrameAnalyticsSource source, DataFrameAnalyticsDest dest, DataFrameAnalysis analysis, - @Nullable FetchSourceContext analyzedFields, @Nullable ByteSizeValue modelMemoryLimit) { + @Nullable FetchSourceContext analyzedFields, @Nullable ByteSizeValue modelMemoryLimit, + @Nullable Instant createTime, @Nullable Version version) { this.id = Objects.requireNonNull(id); this.source = Objects.requireNonNull(source); this.dest = Objects.requireNonNull(dest); this.analysis = Objects.requireNonNull(analysis); this.analyzedFields = analyzedFields; this.modelMemoryLimit = modelMemoryLimit; + this.createTime = createTime == null ? 
null : Instant.ofEpochMilli(createTime.toEpochMilli());
+        this.version = version;
     }
 
     public String getId() {
@@ -117,6 +140,14 @@ public ByteSizeValue getModelMemoryLimit() {
         return modelMemoryLimit;
     }
 
+    public Instant getCreateTime() {
+        return createTime;
+    }
+
+    public Version getVersion() {
+        return version;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
@@ -132,6 +163,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         if (modelMemoryLimit != null) {
             builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep());
         }
+        if (createTime != null) {
+            builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli());
+        }
+        if (version != null) {
+            builder.field(VERSION.getPreferredName(), version);
+        }
         builder.endObject();
         return builder;
     }
@@ -147,12 +184,14 @@ public boolean equals(Object o) {
             && Objects.equals(dest, other.dest)
             && Objects.equals(analysis, other.analysis)
             && Objects.equals(analyzedFields, other.analyzedFields)
-            && Objects.equals(modelMemoryLimit, other.modelMemoryLimit);
+            && Objects.equals(modelMemoryLimit, other.modelMemoryLimit)
+            && Objects.equals(createTime, other.createTime)
+            && Objects.equals(version, other.version);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(id, source, dest, analysis, analyzedFields, getModelMemoryLimit());
+        return Objects.hash(id, source, dest, analysis, analyzedFields, modelMemoryLimit, createTime, version);
     }
 
     @Override
@@ -168,6 +207,8 @@ public static class Builder {
         private DataFrameAnalysis analysis;
         private FetchSourceContext analyzedFields;
         private ByteSizeValue modelMemoryLimit;
+        private Instant createTime;
+        private Version version;
 
         private Builder() {}
 
@@ -201,8 +242,18 @@ public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) {
             return this;
         }
 
+        public Builder setCreateTime(Instant createTime) {
+            this.createTime = createTime;
+            return this;
+        }
+
+        public Builder setVersion(Version version) {
+            this.version = version;
+            return this;
+        }
+
         public DataFrameAnalyticsConfig build() {
-            return new DataFrameAnalyticsConfig(id, source, dest, analysis, analyzedFields, modelMemoryLimit);
+            return new DataFrameAnalyticsConfig(id, source, dest, analysis, analyzedFields, modelMemoryLimit, createTime, version);
         }
     }
 }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigTests.java
index f6826af551d0a..957afc69dd0c2 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.client.ml.dataframe;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -29,6 +30,7 @@
 import org.elasticsearch.test.AbstractXContentTestCase;
 
 import java.io.IOException;
+import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -54,6 +56,12 @@ public static DataFrameAnalyticsConfig randomDataFrameAnalyticsConfig() {
         if (randomBoolean()) {
             builder.setModelMemoryLimit(new
ByteSizeValue(randomIntBetween(1, 16), randomFrom(ByteSizeUnit.MB, ByteSizeUnit.GB))); } + if (randomBoolean()) { + builder.setCreateTime(Instant.now()); + } + if (randomBoolean()) { + builder.setVersion(Version.CURRENT); + } return builder.build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java index 0e9acdd44a2fe..99460a6883a56 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.core.ml.dataframe; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -17,12 +19,14 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.ml.dataframe.analyses.DataFrameAnalysis; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import java.io.IOException; +import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -47,6 +51,8 @@ public class DataFrameAnalyticsConfig implements ToXContentObject, Writeable { public static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields"); public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); public static final ParseField HEADERS = new ParseField("headers"); + public static final ParseField CREATE_TIME = new ParseField("create_time"); + public static final ParseField VERSION = new ParseField("version"); public static final ObjectParser STRICT_PARSER = createParser(false); public static final ObjectParser LENIENT_PARSER = createParser(true); @@ -69,6 +75,18 @@ public static ObjectParser createParser(boolean ignoreUnknownFiel // Headers are not parsed by the strict (config) parser, so headers supplied in the _body_ of a REST request will be rejected. // (For config, headers are explicitly transferred from the auth headers by code in the put data frame actions.) parser.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS); + // Creation time is set automatically during PUT, so create_time supplied in the _body_ of a REST request will be rejected. + parser.declareField(Builder::setCreateTime, + p -> TimeUtils.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), + CREATE_TIME, + ObjectParser.ValueType.VALUE); + // Version is set automatically during PUT, so version supplied in the _body_ of a REST request will be rejected. 
+            parser.declareField(Builder::setVersion, p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return Version.fromString(p.text());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, VERSION, ObjectParser.ValueType.STRING);
         }
         return parser;
     }
@@ -96,10 +114,12 @@ private static DataFrameAnalysis parseAnalysis(XContentParser parser, boolean ig
      */
     private final ByteSizeValue modelMemoryLimit;
     private final Map<String, String> headers;
+    private final Instant createTime;
+    private final Version version;
 
     public DataFrameAnalyticsConfig(String id, DataFrameAnalyticsSource source, DataFrameAnalyticsDest dest, DataFrameAnalysis analysis,
                                     Map<String, String> headers, ByteSizeValue modelMemoryLimit,
-                                    FetchSourceContext analyzedFields) {
+                                    FetchSourceContext analyzedFields, Instant createTime, Version version) {
         this.id = ExceptionsHelper.requireNonNull(id, ID);
         this.source = ExceptionsHelper.requireNonNull(source, SOURCE);
         this.dest = ExceptionsHelper.requireNonNull(dest, DEST);
@@ -107,16 +127,25 @@ public DataFrameAnalyticsConfig(String id, DataFrameAnalyticsSource source, Data
         this.analyzedFields = analyzedFields;
         this.modelMemoryLimit = modelMemoryLimit;
         this.headers = Collections.unmodifiableMap(headers);
+        this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
+        this.version = version;
     }
 
     public DataFrameAnalyticsConfig(StreamInput in) throws IOException {
-        id = in.readString();
-        source = new DataFrameAnalyticsSource(in);
-        dest = new DataFrameAnalyticsDest(in);
-        analysis = in.readNamedWriteable(DataFrameAnalysis.class);
+        this.id = in.readString();
+        this.source = new DataFrameAnalyticsSource(in);
+        this.dest = new DataFrameAnalyticsDest(in);
+        this.analysis = in.readNamedWriteable(DataFrameAnalysis.class);
         this.analyzedFields = in.readOptionalWriteable(FetchSourceContext::new);
         this.modelMemoryLimit = in.readOptionalWriteable(ByteSizeValue::new);
         this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString));
+        if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
+            createTime = in.readOptionalInstant();
+            version = in.readBoolean() ?
Version.readVersion(in) : null; + } else { + createTime = null; + version = null; + } } public String getId() { @@ -147,6 +176,14 @@ public Map getHeaders() { return headers; } + public Instant getCreateTime() { + return createTime; + } + + public Version getVersion() { + return version; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -168,6 +205,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (headers.isEmpty() == false && params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false)) { builder.field(HEADERS.getPreferredName(), headers); } + if (createTime != null) { + builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); + } + if (version != null) { + builder.field(VERSION.getPreferredName(), version); + } builder.endObject(); return builder; } @@ -181,6 +224,15 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(analyzedFields); out.writeOptionalWriteable(modelMemoryLimit); out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + out.writeOptionalInstant(createTime); + if (version != null) { + out.writeBoolean(true); + Version.writeVersion(version, out); + } else { + out.writeBoolean(false); + } + } } @Override @@ -195,12 +247,19 @@ public boolean equals(Object o) { && Objects.equals(analysis, other.analysis) && Objects.equals(headers, other.headers) && Objects.equals(getModelMemoryLimit(), other.getModelMemoryLimit()) - && Objects.equals(analyzedFields, other.analyzedFields); + && Objects.equals(analyzedFields, other.analyzedFields) + && Objects.equals(createTime, other.createTime) + && Objects.equals(version, other.version); } @Override public int hashCode() { - return Objects.hash(id, source, dest, analysis, headers, getModelMemoryLimit(), analyzedFields); + return Objects.hash(id, source, dest, analysis, headers, getModelMemoryLimit(), analyzedFields, createTime, version); + } + + @Override + public String toString() { + return Strings.toString(this); } public static String documentId(String id) { @@ -217,6 +276,8 @@ public static class Builder { private ByteSizeValue modelMemoryLimit; private ByteSizeValue maxModelMemoryLimit; private Map headers = Collections.emptyMap(); + private Instant createTime; + private Version version; public Builder() {} @@ -243,6 +304,8 @@ public Builder(DataFrameAnalyticsConfig config, ByteSizeValue maxModelMemoryLimi if (config.analyzedFields != null) { this.analyzedFields = new FetchSourceContext(true, config.analyzedFields.includes(), config.analyzedFields.excludes()); } + this.createTime = config.createTime; + this.version = config.version; } public String getId() { @@ -304,9 +367,19 @@ private void applyMaxModelMemoryLimit() { } } + public Builder setCreateTime(Instant createTime) { + this.createTime = createTime; + return this; + } + + public Builder setVersion(Version version) { + this.version = version; + return this; + } + public DataFrameAnalyticsConfig build() { applyMaxModelMemoryLimit(); - return new DataFrameAnalyticsConfig(id, source, dest, analysis, headers, modelMemoryLimit, analyzedFields); + return new DataFrameAnalyticsConfig(id, source, dest, analysis, headers, modelMemoryLimit, analyzedFields, createTime, version); } } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index bc69f4b5d5e20..75ce2d53315c3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -391,6 +391,10 @@ public static void addDatafeedConfigFields(XContentBuilder builder) throws IOExc .endObject(); } + /** + * {@link DataFrameAnalyticsConfig} mapping. + * Does not include mapping for CREATE_TIME as this mapping is added by {@link #addJobConfigFields} method. + */ public static void addDataFrameAnalyticsFields(XContentBuilder builder) throws IOException { builder.startObject(DataFrameAnalyticsConfig.ID.getPreferredName()) .field(TYPE, KEYWORD) @@ -434,6 +438,10 @@ public static void addDataFrameAnalyticsFields(XContentBuilder builder) throws I .endObject() .endObject() .endObject() + .endObject() + // re-used: CREATE_TIME + .startObject(DataFrameAnalyticsConfig.VERSION.getPreferredName()) + .field(TYPE, KEYWORD) .endObject(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index 39036abb693b0..eff33a37d9773 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -277,6 +277,8 @@ public final class ReservedFieldNames { DataFrameAnalyticsConfig.DEST.getPreferredName(), DataFrameAnalyticsConfig.ANALYSIS.getPreferredName(), DataFrameAnalyticsConfig.ANALYZED_FIELDS.getPreferredName(), + DataFrameAnalyticsConfig.CREATE_TIME.getPreferredName(), + DataFrameAnalyticsConfig.VERSION.getPreferredName(), DataFrameAnalyticsDest.INDEX.getPreferredName(), DataFrameAnalyticsDest.RESULTS_FIELD.getPreferredName(), DataFrameAnalyticsSource.INDEX.getPreferredName(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfigTests.java index dd9b229913aa9..5464181d17b1d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfigTests.java @@ -8,6 +8,7 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -17,6 +18,7 @@ import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentHelper; @@ -30,16 +32,18 @@ import org.elasticsearch.xpack.core.ml.dataframe.analyses.MlDataFrameAnalysisNamedXContentProvider; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetectionTests; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; +import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; @@ -49,7 +53,11 @@ public class DataFrameAnalyticsConfigTests extends AbstractSerializingTestCase dataFrameAnalyticsConfigParser = + lenient + ? DataFrameAnalyticsConfig.LENIENT_PARSER + : DataFrameAnalyticsConfig.STRICT_PARSER; + return dataFrameAnalyticsConfigParser.apply(parser, null).build(); } @Override @@ -70,7 +78,7 @@ protected NamedXContentRegistry xContentRegistry() { @Override protected DataFrameAnalyticsConfig createTestInstance() { - return createRandom(randomValidId()); + return createRandom(randomValidId(), lenient); } @Override @@ -79,10 +87,18 @@ protected Writeable.Reader instanceReader() { } public static DataFrameAnalyticsConfig createRandom(String id) { - return createRandomBuilder(id).build(); + return createRandom(id, false); + } + + public static DataFrameAnalyticsConfig createRandom(String id, boolean withGeneratedFields) { + return createRandomBuilder(id, withGeneratedFields).build(); } public static DataFrameAnalyticsConfig.Builder createRandomBuilder(String id) { + return createRandomBuilder(id, false); + } + + public static DataFrameAnalyticsConfig.Builder createRandomBuilder(String id, boolean withGeneratedFields) { DataFrameAnalyticsSource source = DataFrameAnalyticsSourceTests.createRandom(); DataFrameAnalyticsDest dest = DataFrameAnalyticsDestTests.createRandom(); DataFrameAnalyticsConfig.Builder builder = new DataFrameAnalyticsConfig.Builder() @@ -98,6 +114,14 @@ public static DataFrameAnalyticsConfig.Builder createRandomBuilder(String id) { if (randomBoolean()) { builder.setModelMemoryLimit(new ByteSizeValue(randomIntBetween(1, 16), randomFrom(ByteSizeUnit.MB, ByteSizeUnit.GB))); } + if (withGeneratedFields) { + if (randomBoolean()) { + builder.setCreateTime(Instant.now()); + } + if (randomBoolean()) { + builder.setVersion(Version.CURRENT); + } + } return builder; } @@ -122,6 +146,13 @@ public static String randomValidId() { " \"analysis\": {\"outlier_detection\": {\"n_neighbors\": 10}}\n" + "}"; + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + public void testQueryConfigStoresUserInputOnly() throws IOException { try (XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(xContentRegistry(), @@ -245,6 +276,36 @@ public void testExplicitModelMemoryLimitTooHigh() { assertThat(e.getMessage(), containsString("must be less than the value of the xpack.ml.max_model_memory_limit setting")); } + public void testPreventCreateTimeInjection() throws IOException { + String json = "{" + + " \"create_time\" : 123456789 }," + + " \"source\" : {\"index\":\"src\"}," + + " \"dest\" : {\"index\": \"dest\"}," + + "}"; + + try (XContentParser parser = + 
XContentFactory.xContent(XContentType.JSON).createParser( + xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) { + Exception e = expectThrows(IllegalArgumentException.class, () -> DataFrameAnalyticsConfig.STRICT_PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("unknown field [create_time], parser not found")); + } + } + + public void testPreventVersionInjection() throws IOException { + String json = "{" + + " \"version\" : \"7.3.0\"," + + " \"source\" : {\"index\":\"src\"}," + + " \"dest\" : {\"index\": \"dest\"}," + + "}"; + + try (XContentParser parser = + XContentFactory.xContent(XContentType.JSON).createParser( + xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) { + Exception e = expectThrows(IllegalArgumentException.class, () -> DataFrameAnalyticsConfig.STRICT_PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("unknown field [version], parser not found")); + } + } + public void assertTooSmall(IllegalArgumentException e) { assertThat(e.getMessage(), is("[model_memory_limit] must be at least [1mb]")); } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 428721a0d5536..686f6ed3fbec5 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -42,6 +42,8 @@ integTest.runner { 'ml/datafeeds_crud/Test put datafeed with security headers in the body', 'ml/datafeeds_crud/Test update datafeed with missing id', 'ml/data_frame_analytics_crud/Test put config with security headers in the body', + 'ml/data_frame_analytics_crud/Test put config with create_time in the body', + 'ml/data_frame_analytics_crud/Test put config with version in the body', 'ml/data_frame_analytics_crud/Test put config with inconsistent body/param ids', 'ml/data_frame_analytics_crud/Test put config with invalid id', 'ml/data_frame_analytics_crud/Test put config with invalid dest index name', diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java index 0f709b4e16680..d8f5dbb469f5f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.action; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import java.io.IOException; +import java.time.Instant; import java.util.Objects; import java.util.function.Supplier; @@ -91,7 +93,10 @@ protected void doExecute(Task task, PutDataFrameAnalyticsAction.Request request, } validateConfig(request.getConfig()); DataFrameAnalyticsConfig memoryCappedConfig = - new DataFrameAnalyticsConfig.Builder(request.getConfig(), maxModelMemoryLimit).build(); + new DataFrameAnalyticsConfig.Builder(request.getConfig(), maxModelMemoryLimit) + .setCreateTime(Instant.now()) + .setVersion(Version.CURRENT) + .build(); if (licenseState.isAuthAllowed()) { final String username = securityContext.getUser().principal(); 
RoleDescriptor.IndicesPrivileges sourceIndexPrivileges = RoleDescriptor.IndicesPrivileges.builder() @@ -156,5 +161,6 @@ private void validateConfig(DataFrameAnalyticsConfig config) { } config.getDest().validate(); new SourceDestValidator(clusterService.state(), indexNameExpressionResolver).check(config); + } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml index 307ecda231b16..bfde8128b491c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml @@ -453,3 +453,41 @@ setup: "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} } } + +--- +"Test put valid config with create_time in the body": + + - do: + catch: /Found \[create_time\], not allowed for strict parsing/ + data_frame.put_data_frame_transform: + transform_id: "airline-transform-with-create-time" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-data-by-airline" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + }, + "description": "yaml test transform on airline-data", + "create_time": 123456789 + } + +--- +"Test put valid config with version in the body": + + - do: + catch: /Found \[version\], not allowed for strict parsing/ + data_frame.put_data_frame_transform: + transform_id: "airline-transform-with-version" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-data-by-airline" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + }, + "description": "yaml test transform on airline-data", + "version": "7.3.0" + } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml index e5a68fb33834e..01afb7714f395 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml @@ -55,6 +55,21 @@ setup: - match: { dest.index: "index-dest" } - match: { analysis: {"outlier_detection":{}} } - match: { analyzed_fields: {"includes" : ["obj1.*", "obj2.*" ], "excludes": [] } } + - is_true: create_time + - is_true: version + + - do: + ml.get_data_frame_analytics: + id: "simple-outlier-detection-with-query" + - match: { count: 1 } + - match: { data_frame_analytics.0.id: "simple-outlier-detection-with-query" } + - match: { data_frame_analytics.0.source.index: "index-source" } + - match: { data_frame_analytics.0.source.query: {"term" : { "user" : "Kimchy"} } } + - match: { data_frame_analytics.0.dest.index: "index-dest" } + - match: { data_frame_analytics.0.analysis: {"outlier_detection":{}} } + - match: { data_frame_analytics.0.analyzed_fields: {"includes" : ["obj1.*", "obj2.*" ], "excludes": [] } } + - is_true: data_frame_analytics.0.create_time + - is_true: data_frame_analytics.0.version --- "Test put config with security headers in the body": @@ -75,6 +90,44 @@ setup: "headers":{ "a_security_header" : "secret" } } +--- +"Test put config with create_time in the body": + + - do: + catch: /unknown field \[create_time\], parser not found/ + ml.put_data_frame_analytics: + id: 
"data_frame_with_create_time" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": {"outlier_detection":{}}, + "create_time": 123456789 + } + +--- +"Test put config with version in the body": + + - do: + catch: /unknown field \[version\], parser not found/ + ml.put_data_frame_analytics: + id: "data_frame_with_version" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": {"outlier_detection":{}}, + "version": "7.3.0" + } + --- "Test put valid config with default outlier detection": @@ -96,6 +149,8 @@ setup: - match: { source.query: {"match_all" : {} } } - match: { dest.index: "index-dest" } - match: { analysis: {"outlier_detection":{}} } + - is_true: create_time + - is_true: version --- "Test put valid config with custom outlier detection": @@ -126,6 +181,8 @@ setup: - match: { analysis.outlier_detection.n_neighbors: 5 } - match: { analysis.outlier_detection.method: "lof" } - match: { analysis.outlier_detection.minimum_score_to_write_feature_influence: 0.0 } + - is_true: create_time + - is_true: version --- "Test put config with inconsistent body/param ids": From 25792d31321d464f87b381ac840894b0d90b19f0 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 27 Jun 2019 10:31:50 -0700 Subject: [PATCH 045/140] Remove nodeId from BaseNodeRequest (#43658) TransportNodesAction provides a mechanism to easily broadcast a request to many nodes, and collect the respones into a high level response. Each node has its own request type, with a base class of BaseNodeRequest. This base request requires passing the nodeId to which the request will be sent. However, that nodeId is not used anywhere. It is private to the base class, yet serialized to each node, where the node could just as easily find the nodeId of the node it is on locally. This commit removes passing the nodeId through to the node request creation, and guards its serialization so that we can remove the base request class altogether in the future. 
--- build.gradle | 4 ++-- .../TransportNodesHotThreadsAction.java | 7 +++--- .../node/info/TransportNodesInfoAction.java | 7 +++--- ...nsportNodesReloadSecureSettingsAction.java | 7 +++--- .../node/stats/TransportNodesStatsAction.java | 7 +++--- .../node/usage/TransportNodesUsageAction.java | 7 +++--- .../status/TransportNodesSnapshotsStatus.java | 7 +++--- .../stats/TransportClusterStatsAction.java | 7 +++--- .../action/support/nodes/BaseNodeRequest.java | 20 ++++++++-------- .../support/nodes/TransportNodesAction.java | 4 ++-- .../TransportNodesListGatewayMetaState.java | 12 ++-------- ...ransportNodesListGatewayStartedShards.java | 7 +++--- .../TransportNodesListShardStoreMetaData.java | 7 +++--- .../node/tasks/CancellableTasksTests.java | 23 ++++++++----------- .../cluster/node/tasks/TestTaskPlugin.java | 13 ++++------- .../node/tasks/TransportTasksActionTests.java | 7 +++--- .../nodes/TransportNodesActionTests.java | 2 +- .../NodesDeprecationCheckAction.java | 3 +-- .../action/realm/ClearRealmCacheRequest.java | 3 +-- .../action/role/ClearRolesCacheRequest.java | 3 +-- .../actions/stats/WatcherStatsRequest.java | 3 +-- .../TransportNodeDeprecationCheckAction.java | 4 ++-- .../realm/TransportClearRealmCacheAction.java | 4 ++-- .../role/TransportClearRolesCacheAction.java | 4 ++-- .../xpack/sql/plugin/SqlStatsRequest.java | 3 +-- .../sql/plugin/TransportSqlStatsAction.java | 4 ++-- .../stats/TransportWatcherStatsAction.java | 4 ++-- .../TransportWatcherStatsActionTests.java | 4 ++-- 28 files changed, 76 insertions(+), 111 deletions(-) diff --git a/build.gradle b/build.gradle index 5c1fe80668283..d9437ba2a3a99 100644 --- a/build.gradle +++ b/build.gradle @@ -160,8 +160,8 @@ task verifyVersions { * after the backport of the backcompat code is complete. 
*/ -boolean bwc_tests_enabled = true -final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ +boolean bwc_tests_enabled = false +final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/43658" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 178ab6c19ea62..4f85177b6e671 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -55,8 +55,8 @@ protected NodesHotThreadsResponse newResponse(NodesHotThreadsRequest request, } @Override - protected NodeRequest newNodeRequest(String nodeId, NodesHotThreadsRequest request) { - return new NodeRequest(nodeId, request); + protected NodeRequest newNodeRequest(NodesHotThreadsRequest request) { + return new NodeRequest(request); } @Override @@ -86,8 +86,7 @@ public static class NodeRequest extends BaseNodeRequest { public NodeRequest() { } - NodeRequest(String nodeId, NodesHotThreadsRequest request) { - super(nodeId); + NodeRequest(NodesHotThreadsRequest request) { this.request = request; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 078d428c34e11..903f6adb7b931 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -57,8 +57,8 @@ protected NodesInfoResponse newResponse(NodesInfoRequest nodesInfoRequest, } @Override - protected NodeInfoRequest newNodeRequest(String nodeId, NodesInfoRequest request) { - return new NodeInfoRequest(nodeId, request); + protected NodeInfoRequest newNodeRequest(NodesInfoRequest request) { + return new NodeInfoRequest(request); } @Override @@ -80,8 +80,7 @@ public static class NodeInfoRequest extends BaseNodeRequest { public NodeInfoRequest() { } - public NodeInfoRequest(String nodeId, NodesInfoRequest request) { - super(nodeId); + public NodeInfoRequest(NodesInfoRequest request) { this.request = request; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index 44abbfd85917f..27860b52557e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -69,8 +69,8 @@ protected NodesReloadSecureSettingsResponse newResponse(NodesReloadSecureSetting } @Override - protected NodeRequest newNodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { - return new NodeRequest(nodeId, request); + protected NodeRequest 
newNodeRequest(NodesReloadSecureSettingsRequest request) { + return new NodeRequest(request); } @Override @@ -117,8 +117,7 @@ public static class NodeRequest extends BaseNodeRequest { public NodeRequest() { } - NodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { - super(nodeId); + NodeRequest(NodesReloadSecureSettingsRequest request) { this.request = request; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 86c3f7b983471..f399304a8a10e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -56,8 +56,8 @@ protected NodesStatsResponse newResponse(NodesStatsRequest request, List responses, List failures); - protected abstract NodeRequest newNodeRequest(String nodeId, NodesRequest request); + protected abstract NodeRequest newNodeRequest(NodesRequest request); protected abstract NodeResponse newNodeResponse(); @@ -170,7 +170,7 @@ void start() { final DiscoveryNode node = nodes[i]; final String nodeId = node.getId(); try { - TransportRequest nodeRequest = newNodeRequest(nodeId, request); + TransportRequest nodeRequest = newNodeRequest(request); if (task != null) { nodeRequest.setParentTask(clusterService.localNode().getId(), task.getId()); } diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 715f117e170fa..d06ad91dbbcf2 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -68,8 +68,8 @@ public ActionFuture list(String[] nodesIds, @Nullable Tim } @Override - protected NodeRequest newNodeRequest(String nodeId, Request request) { - return new NodeRequest(nodeId); + protected NodeRequest newNodeRequest(Request request) { + return new NodeRequest(); } @Override @@ -115,14 +115,6 @@ protected void writeNodesTo(StreamOutput out, List nodes) } public static class NodeRequest extends BaseNodeRequest { - - public NodeRequest() { - } - - NodeRequest(String nodeId) { - super(nodeId); - } - } public static class NodeGatewayMetaState extends BaseNodeResponse { diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 791f32cd43602..be33bb031c32c 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -93,8 +93,8 @@ public void list(ShardId shardId, DiscoveryNode[] nodes, } @Override - protected NodeRequest newNodeRequest(String nodeId, Request request) { - return new NodeRequest(nodeId, request); + protected NodeRequest newNodeRequest(Request request) { + return new NodeRequest(request); } @Override @@ -223,8 +223,7 @@ public static class NodeRequest extends BaseNodeRequest { public NodeRequest() { } - public NodeRequest(String nodeId, Request request) { - super(nodeId); + public NodeRequest(Request request) { this.shardId = request.shardId(); } diff --git 
a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index f084f7c311bae..20633d24ba7eb 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -92,8 +92,8 @@ public void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener res } @Override - protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) { - return new NodeRequest(request, nodeId, request.getShouldBlock()); + protected NodeRequest newNodeRequest(NodesRequest request) { + return new NodeRequest(request, request.getShouldBlock()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 3d8ab68d5f25a..6d8749ad4f4c8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -84,8 +84,7 @@ public NodeRequest() { super(); } - public NodeRequest(NodesRequest request, String nodeId) { - super(nodeId); + public NodeRequest(NodesRequest request) { requestName = request.requestName; } @@ -157,8 +156,8 @@ abstract class TestNodesAction extends AbstractTestNodesAction Date: Thu, 27 Jun 2019 13:34:33 -0400 Subject: [PATCH 046/140] Geo: Makes coordinate validator in libs/geo plugable (#43657) Moves coordinate validation from Geometry constructors into parser. 
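
As a rough usage sketch of the new contract (illustrative only, based on the
`GeometryValidator` and `GeographyValidator` classes added below; not code
from this commit):

    // Constructors no longer range-check coordinates; validation is an
    // explicit, pluggable step that parsers can invoke.
    GeometryValidator validator = new GeographyValidator(false); // false: reject Z values
    Point point = new Point(38.9, -77.0); // builds without any validation
    validator.validate(point); // throws IllegalArgumentException if lat is outside
                               // [-90, 90] or lon is outside [-180, 180]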
Relates #43644 --- .../elasticsearch/geo/geometry/Circle.java | 2 - .../geo/geometry/GeometryUtils.java | 78 -------- .../org/elasticsearch/geo/geometry/Line.java | 4 - .../org/elasticsearch/geo/geometry/Point.java | 2 - .../elasticsearch/geo/geometry/Rectangle.java | 30 --- .../geo/utils/GeographyValidator.java | 178 ++++++++++++++++++ .../geo/utils/GeometryValidator.java | 34 ++++ .../geo/utils/WellKnownText.java | 15 +- .../geo/geometry/BaseGeometryTestCase.java | 3 +- .../geo/geometry/CircleTests.java | 11 +- .../geo/geometry/GeometryCollectionTests.java | 3 +- .../geo/geometry/GeometryValidatorTests.java | 127 +++++++++++++ .../elasticsearch/geo/geometry/LineTests.java | 16 +- .../geo/geometry/LinearRingTests.java | 19 +- .../geo/geometry/MultiLineTests.java | 3 +- .../geo/geometry/MultiPointTests.java | 3 +- .../geo/geometry/MultiPolygonTests.java | 3 +- .../geo/geometry/PointTests.java | 11 +- .../geo/geometry/PolygonTests.java | 10 +- .../geo/geometry/RectangleTests.java | 17 +- .../org/elasticsearch/common/geo/GeoJson.java | 23 ++- .../common/geo/GeometryParser.java | 9 +- .../common/geo/BaseGeoParsingTestCase.java | 3 +- .../common/geo/GeoDistanceTests.java | 17 -- .../common/geo/GeoJsonParserTests.java | 47 ++--- .../common/geo/GeoJsonSerializationTests.java | 3 +- .../common/geo/GeometryParserTests.java | 2 +- .../extractor/fields/ExtractedField.java | 3 +- .../xpack/sql/jdbc/TypeConverter.java | 3 +- .../xpack/sql/qa/jdbc/JdbcAssert.java | 3 +- .../function/scalar/geo/GeoShape.java | 6 +- 31 files changed, 472 insertions(+), 216 deletions(-) delete mode 100644 libs/geo/src/main/java/org/elasticsearch/geo/geometry/GeometryUtils.java create mode 100644 libs/geo/src/main/java/org/elasticsearch/geo/utils/GeographyValidator.java create mode 100644 libs/geo/src/main/java/org/elasticsearch/geo/utils/GeometryValidator.java create mode 100644 libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryValidatorTests.java diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Circle.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Circle.java index cb8e2c4cb33e1..ad9881ab72fba 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Circle.java +++ b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Circle.java @@ -49,8 +49,6 @@ public Circle(final double lat, final double lon, final double alt, final double if (radiusMeters < 0 ) { throw new IllegalArgumentException("Circle radius [" + radiusMeters + "] cannot be negative"); } - GeometryUtils.checkLatitude(lat); - GeometryUtils.checkLongitude(lon); } @Override diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/GeometryUtils.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/GeometryUtils.java deleted file mode 100644 index c7bfa16b16a8d..0000000000000 --- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/GeometryUtils.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.geo.geometry; - -/** - * Geometry-related utility methods - */ -public final class GeometryUtils { - /** - * Minimum longitude value. - */ - static final double MIN_LON_INCL = -180.0D; - - /** - * Maximum longitude value. - */ - static final double MAX_LON_INCL = 180.0D; - - /** - * Minimum latitude value. - */ - static final double MIN_LAT_INCL = -90.0D; - - /** - * Maximum latitude value. - */ - static final double MAX_LAT_INCL = 90.0D; - - // No instance: - private GeometryUtils() { - } - - /** - * validates latitude value is within standard +/-90 coordinate bounds - */ - static void checkLatitude(double latitude) { - if (Double.isNaN(latitude) || latitude < MIN_LAT_INCL || latitude > MAX_LAT_INCL) { - throw new IllegalArgumentException( - "invalid latitude " + latitude + "; must be between " + MIN_LAT_INCL + " and " + MAX_LAT_INCL); - } - } - - /** - * validates longitude value is within standard +/-180 coordinate bounds - */ - static void checkLongitude(double longitude) { - if (Double.isNaN(longitude) || longitude < MIN_LON_INCL || longitude > MAX_LON_INCL) { - throw new IllegalArgumentException( - "invalid longitude " + longitude + "; must be between " + MIN_LON_INCL + " and " + MAX_LON_INCL); - } - } - - public static double checkAltitude(final boolean ignoreZValue, double zValue) { - if (ignoreZValue == false) { - throw new IllegalArgumentException("found Z value [" + zValue + "] but [ignore_z_value] " - + "parameter is [" + ignoreZValue + "]"); - } - return zValue; - } - -} diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Line.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Line.java index c2c9cb4b83a18..20f4314246950 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Line.java +++ b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Line.java @@ -59,10 +59,6 @@ public Line(double[] lats, double[] lons, double[] alts) { if (alts != null && alts.length != lats.length) { throw new IllegalArgumentException("alts and lats must be equal length"); } - for (int i = 0; i < lats.length; i++) { - GeometryUtils.checkLatitude(lats[i]); - GeometryUtils.checkLongitude(lons[i]); - } } public int length() { diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Point.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Point.java index 248f433b96a13..88fd5eb06fe79 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Point.java +++ b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Point.java @@ -42,8 +42,6 @@ public Point(double lat, double lon) { } public Point(double lat, double lon, double alt) { - GeometryUtils.checkLatitude(lat); - GeometryUtils.checkLongitude(lon); this.lat = lat; this.lon = lon; this.alt = alt; diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Rectangle.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Rectangle.java index ca7ec2e57c98d..75ba25721e755 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Rectangle.java +++ 
b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Rectangle.java @@ -71,10 +71,6 @@ public Rectangle(double minLat, double maxLat, double minLon, double maxLon) { * Constructs a bounding box by first validating the provided latitude and longitude coordinates */ public Rectangle(double minLat, double maxLat, double minLon, double maxLon, double minAlt, double maxAlt) { - GeometryUtils.checkLatitude(minLat); - GeometryUtils.checkLatitude(maxLat); - GeometryUtils.checkLongitude(minLon); - GeometryUtils.checkLongitude(maxLon); this.minLon = minLon; this.maxLon = maxLon; this.minLat = minLat; @@ -90,17 +86,6 @@ public Rectangle(double minLat, double maxLat, double minLon, double maxLon, dou } } - public double getWidth() { - if (crossesDateline()) { - return GeometryUtils.MAX_LON_INCL - minLon + maxLon - GeometryUtils.MIN_LON_INCL; - } - return maxLon - minLon; - } - - public double getHeight() { - return maxLat - minLat; - } - public double getMinLat() { return minLat; } @@ -156,21 +141,6 @@ public String toString() { return b.toString(); } - /** - * Returns true if this bounding box crosses the dateline - */ - public boolean crossesDateline() { - return maxLon < minLon; - } - - /** returns true if rectangle (defined by minLat, maxLat, minLon, maxLon) contains the lat lon point */ - public boolean containsPoint(final double lat, final double lon) { - if (lat >= minLat && lat <= maxLat) { - return crossesDateline() ? lon >= minLon || lon <= maxLon : lon >= minLon && lon <= maxLon; - } - return false; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeographyValidator.java b/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeographyValidator.java new file mode 100644 index 0000000000000..756792358abd4 --- /dev/null +++ b/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeographyValidator.java @@ -0,0 +1,178 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.geo.utils; + +import org.elasticsearch.geo.geometry.Circle; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.GeometryCollection; +import org.elasticsearch.geo.geometry.GeometryVisitor; +import org.elasticsearch.geo.geometry.Line; +import org.elasticsearch.geo.geometry.LinearRing; +import org.elasticsearch.geo.geometry.MultiLine; +import org.elasticsearch.geo.geometry.MultiPoint; +import org.elasticsearch.geo.geometry.MultiPolygon; +import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.geometry.Polygon; +import org.elasticsearch.geo.geometry.Rectangle; + +/** + * Validator that checks that lats are between -90 and +90 and lons are between -180 and +180 and altitude is present only if + * ignoreZValue is set to true + */ +public class GeographyValidator implements GeometryValidator { + + /** + * Minimum longitude value. + */ + private static final double MIN_LON_INCL = -180.0D; + + /** + * Maximum longitude value. + */ + private static final double MAX_LON_INCL = 180.0D; + + /** + * Minimum latitude value. + */ + private static final double MIN_LAT_INCL = -90.0D; + + /** + * Maximum latitude value. + */ + private static final double MAX_LAT_INCL = 90.0D; + + private final boolean ignoreZValue; + + public GeographyValidator(boolean ignoreZValue) { + this.ignoreZValue = ignoreZValue; + } + + /** + * validates latitude value is within standard +/-90 coordinate bounds + */ + protected void checkLatitude(double latitude) { + if (Double.isNaN(latitude) || latitude < MIN_LAT_INCL || latitude > MAX_LAT_INCL) { + throw new IllegalArgumentException( + "invalid latitude " + latitude + "; must be between " + MIN_LAT_INCL + " and " + MAX_LAT_INCL); + } + } + + /** + * validates longitude value is within standard +/-180 coordinate bounds + */ + protected void checkLongitude(double longitude) { + if (Double.isNaN(longitude) || longitude < MIN_LON_INCL || longitude > MAX_LON_INCL) { + throw new IllegalArgumentException( + "invalid longitude " + longitude + "; must be between " + MIN_LON_INCL + " and " + MAX_LON_INCL); + } + } + + protected void checkAltitude(double zValue) { + if (ignoreZValue == false && Double.isNaN(zValue) == false) { + throw new IllegalArgumentException("found Z value [" + zValue + "] but [ignore_z_value] " + + "parameter is [" + ignoreZValue + "]"); + } + } + + @Override + public void validate(Geometry geometry) { + geometry.visit(new GeometryVisitor() { + + @Override + public Void visit(Circle circle) throws RuntimeException { + checkLatitude(circle.getLat()); + checkLongitude(circle.getLon()); + checkAltitude(circle.getAlt()); + return null; + } + + @Override + public Void visit(GeometryCollection collection) throws RuntimeException { + for (Geometry g : collection) { + g.visit(this); + } + return null; + } + + @Override + public Void visit(Line line) throws RuntimeException { + for (int i = 0; i < line.length(); i++) { + checkLatitude(line.getLat(i)); + checkLongitude(line.getLon(i)); + checkAltitude(line.getAlt(i)); + } + return null; + } + + @Override + public Void visit(LinearRing ring) throws RuntimeException { + for (int i = 0; i < ring.length(); i++) { + checkLatitude(ring.getLat(i)); + checkLongitude(ring.getLon(i)); + checkAltitude(ring.getAlt(i)); + } + return null; + } + + @Override + public Void visit(MultiLine multiLine) throws RuntimeException { + return visit((GeometryCollection) multiLine); + } + + @Override + public Void visit(MultiPoint multiPoint) throws RuntimeException { 
+ return visit((GeometryCollection) multiPoint); + } + + @Override + public Void visit(MultiPolygon multiPolygon) throws RuntimeException { + return visit((GeometryCollection) multiPolygon); + } + + @Override + public Void visit(Point point) throws RuntimeException { + checkLatitude(point.getLat()); + checkLongitude(point.getLon()); + checkAltitude(point.getAlt()); + return null; + } + + @Override + public Void visit(Polygon polygon) throws RuntimeException { + polygon.getPolygon().visit(this); + for (int i = 0; i < polygon.getNumberOfHoles(); i++) { + polygon.getHole(i).visit(this); + } + return null; + } + + @Override + public Void visit(Rectangle rectangle) throws RuntimeException { + checkLatitude(rectangle.getMinLat()); + checkLatitude(rectangle.getMaxLat()); + checkLongitude(rectangle.getMinLon()); + checkLongitude(rectangle.getMaxLon()); + checkAltitude(rectangle.getMinAlt()); + checkAltitude(rectangle.getMaxAlt()); + return null; + } + }); + } +} diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeometryValidator.java b/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeometryValidator.java new file mode 100644 index 0000000000000..2caf6738ed469 --- /dev/null +++ b/libs/geo/src/main/java/org/elasticsearch/geo/utils/GeometryValidator.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
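// Because GeographyValidator#validate walks the geometry with the
// GeometryVisitor above, a single call covers every coordinate in a nested
// structure. A sketch, assuming the raw GeometryCollection usage seen in the
// tests below (hypothetical class name):
import java.util.Arrays;
import org.elasticsearch.geo.geometry.GeometryCollection;
import org.elasticsearch.geo.geometry.Point;
import org.elasticsearch.geo.utils.GeographyValidator;

public class NestedValidationSketch {
    public static void main(String[] args) {
        GeometryCollection collection =
            new GeometryCollection(Arrays.asList(new Point(10, 20), new Point(10, 200)));
        // the GeometryCollection branch of the visitor recurses into each member,
        // so the invalid longitude of the second point is caught here:
        new GeographyValidator(true).validate(collection);
        // throws IllegalArgumentException: invalid longitude 200.0; must be between -180.0 and 180.0
    }
}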
+ */ + +package org.elasticsearch.geo.utils; + +import org.elasticsearch.geo.geometry.Geometry; + +/** + * Generic geometry validator that can be used by the parser to verify the validity of the parsed geometry + */ +public interface GeometryValidator { + + /** + * Validates the geometry and throws IllegalArgumentException if the geometry is not valid + */ + void validate(Geometry geometry); + +} diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/utils/WellKnownText.java b/libs/geo/src/main/java/org/elasticsearch/geo/utils/WellKnownText.java index 007bb036cec85..4fd4bdb6fd150 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geo/utils/WellKnownText.java +++ b/libs/geo/src/main/java/org/elasticsearch/geo/utils/WellKnownText.java @@ -22,7 +22,6 @@ import org.elasticsearch.geo.geometry.Circle; import org.elasticsearch.geo.geometry.Geometry; import org.elasticsearch.geo.geometry.GeometryCollection; -import org.elasticsearch.geo.geometry.GeometryUtils; import org.elasticsearch.geo.geometry.GeometryVisitor; import org.elasticsearch.geo.geometry.Line; import org.elasticsearch.geo.geometry.LinearRing; @@ -58,11 +57,11 @@ public class WellKnownText { private final String EOL = "END-OF-LINE"; private final boolean coerce; - private final boolean ignoreZValue; + private final GeometryValidator validator; - public WellKnownText(boolean coerce, boolean ignoreZValue) { + public WellKnownText(boolean coerce, GeometryValidator validator) { this.coerce = coerce; - this.ignoreZValue = ignoreZValue; + this.validator = validator; } public String toWKT(Geometry geometry) { @@ -243,7 +242,9 @@ public Geometry fromWKT(String wkt) throws IOException, ParseException { tokenizer.whitespaceChars('\r', '\r'); tokenizer.whitespaceChars('\n', '\n'); tokenizer.commentChar('#'); - return parseGeometry(tokenizer); + Geometry geometry = parseGeometry(tokenizer); + validator.validate(geometry); + return geometry; } finally { reader.close(); } @@ -297,7 +298,7 @@ private Point parsePoint(StreamTokenizer stream) throws IOException, ParseExcept double lat = nextNumber(stream); Point pt; if (isNumberNext(stream)) { - pt = new Point(lat, lon, GeometryUtils.checkAltitude(ignoreZValue, nextNumber(stream))); + pt = new Point(lat, lon, nextNumber(stream)); } else { pt = new Point(lat, lon); } @@ -318,7 +319,7 @@ private void parseCoordinate(StreamTokenizer stream, ArrayList lats, Arr lons.add(nextNumber(stream)); lats.add(nextNumber(stream)); if (isNumberNext(stream)) { - alts.add(GeometryUtils.checkAltitude(ignoreZValue, nextNumber(stream))); + alts.add(nextNumber(stream)); } if (alts.isEmpty() == false && alts.size() != lons.size()) { throw new ParseException("coordinate dimensions do not match: " + tokenString(stream), stream.lineno()); diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/BaseGeometryTestCase.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/BaseGeometryTestCase.java index 47d0f4285ad01..073bff4cb7575 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/BaseGeometryTestCase.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/BaseGeometryTestCase.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.test.AbstractWireTestCase; @@ -53,7 +54,7 @@ protected Writeable.Reader instanceReader() { @SuppressWarnings("unchecked") 
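// With the WellKnownText change above, parsing and validation are now two
// phases: fromWKT builds the geometry first, then runs the injected validator
// over the result. A sketch mirroring the tests below (hypothetical class name):
import org.elasticsearch.geo.geometry.Geometry;
import org.elasticsearch.geo.utils.GeographyValidator;
import org.elasticsearch.geo.utils.WellKnownText;

public class WktValidationSketch {
    public static void main(String[] args) throws Exception {
        WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
        Geometry ok = wkt.fromWKT("point (20.0 10.0)");   // parses and validates cleanly
        Geometry bad = wkt.fromWKT("point (200.0 10.0)"); // parses, then the validator rejects lon 200.0
    }
}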
@Override protected T copyInstance(T instance, Version version) throws IOException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); String text = wkt.toWKT(instance); try { return (T) wkt.fromWKT(text); diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/CircleTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/CircleTests.java index 8bad65db616ca..e8912a39fb435 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/CircleTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/CircleTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -36,7 +38,7 @@ protected Circle createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("circle (20.0 10.0 15.0)", wkt.toWKT(new Circle(10, 20, 15))); assertEquals(new Circle(10, 20, 15), wkt.fromWKT("circle (20.0 10.0 15.0)")); @@ -48,13 +50,14 @@ public void testBasicSerialization() throws IOException, ParseException { } public void testInitValidation() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new Circle(10, 20, -1)); + GeometryValidator validator = new GeographyValidator(true); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(10, 20, -1))); assertEquals("Circle radius [-1.0] cannot be negative", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Circle(100, 20, 1)); + ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(100, 20, 1))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Circle(10, 200, 1)); + ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(10, 200, 1))); assertEquals("invalid longitude 200.0; must be between -180.0 and 180.0", ex.getMessage()); } } diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryCollectionTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryCollectionTests.java index 905d0f3c1257d..c78c47dfbcd96 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryCollectionTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryCollectionTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -35,7 +36,7 @@ protected GeometryCollection createTestInstance(boolean hasAlt) { public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("geometrycollection (point (20.0 10.0),point EMPTY)", wkt.toWKT(new GeometryCollection(Arrays.asList(new Point(10, 20), Point.EMPTY)))); diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryValidatorTests.java 
b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryValidatorTests.java new file mode 100644 index 0000000000000..c747fc2df50a7 --- /dev/null +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/GeometryValidatorTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.geo.geometry; + +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; +import org.elasticsearch.geo.utils.WellKnownText; +import org.elasticsearch.test.ESTestCase; + +public class GeometryValidatorTests extends ESTestCase { + + public static class NoopValidator implements GeometryValidator { + + @Override + public void validate(Geometry geometry) { + + } + } + + public static class OneValidator extends GeographyValidator { + /** + * Minimum longitude value. + */ + private static final double MIN_LON_INCL = -1D; + + /** + * Maximum longitude value. + */ + private static final double MAX_LON_INCL = 1D; + + /** + * Minimum latitude value. + */ + private static final double MIN_LAT_INCL = -1D; + + /** + * Maximum latitude value. + */ + private static final double MAX_LAT_INCL = 1D; + + /** + * Minimum altitude value. + */ + private static final double MIN_ALT_INCL = -1D; + + /** + * Maximum altitude value. 
+ */ + private static final double MAX_ALT_INCL = 1D; + + public OneValidator() { + super(true); + } + + @Override + protected void checkLatitude(double latitude) { + if (Double.isNaN(latitude) || latitude < MIN_LAT_INCL || latitude > MAX_LAT_INCL) { + throw new IllegalArgumentException( + "invalid latitude " + latitude + "; must be between " + MIN_LAT_INCL + " and " + MAX_LAT_INCL); + } + } + + @Override + protected void checkLongitude(double longitude) { + if (Double.isNaN(longitude) || longitude < MIN_LON_INCL || longitude > MAX_LON_INCL) { + throw new IllegalArgumentException( + "invalid longitude " + longitude + "; must be between " + MIN_LON_INCL + " and " + MAX_LON_INCL); + } + } + + @Override + protected void checkAltitude(double zValue) { + if (Double.isNaN(zValue) == false && (zValue < MIN_ALT_INCL || zValue > MAX_ALT_INCL)) { + throw new IllegalArgumentException( + "invalid altitude " + zValue + "; must be between " + MIN_ALT_INCL + " and " + MAX_ALT_INCL); + } + } + } + + public void testNoopValidator() throws Exception { + WellKnownText parser = new WellKnownText(true, new NoopValidator()); + parser.fromWKT("CIRCLE (10000 20000 30000)"); + parser.fromWKT("POINT (10000 20000)"); + parser.fromWKT("LINESTRING (10000 20000, 0 0)"); + parser.fromWKT("POLYGON ((300 100, 400 200, 500 300, 300 100), (50 150, 250 150, 200 100))"); + parser.fromWKT("MULTIPOINT (10000 20000, 20000 30000)"); + } + + public void testOneValidator() throws Exception { + WellKnownText parser = new WellKnownText(true, new OneValidator()); + parser.fromWKT("POINT (0 1)"); + parser.fromWKT("POINT (0 1 0.5)"); + IllegalArgumentException ex; + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT("CIRCLE (1 2 3)")); + assertEquals("invalid latitude 2.0; must be between -1.0 and 1.0", ex.getMessage()); + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT("POINT (2 1)")); + assertEquals("invalid longitude 2.0; must be between -1.0 and 1.0", ex.getMessage()); + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT("LINESTRING (1 -1 0, 0 0 2)")); + assertEquals("invalid altitude 2.0; must be between -1.0 and 1.0", ex.getMessage()); + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT("POLYGON ((0.3 0.1, 0.4 0.2, 5 0.3, 0.3 0.1))")); + assertEquals("invalid longitude 5.0; must be between -1.0 and 1.0", ex.getMessage()); + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT( + "POLYGON ((0.3 0.1, 0.4 0.2, 0.5 0.3, 0.3 0.1), (0.5 1.5, 2.5 1.5, 2.0 1.0))")); + assertEquals("invalid latitude 1.5; must be between -1.0 and 1.0", ex.getMessage()); + ex = expectThrows(IllegalArgumentException.class, () -> parser.fromWKT("MULTIPOINT (0 1, -2 1)")); + assertEquals("invalid longitude -2.0; must be between -1.0 and 1.0", ex.getMessage()); + } + + +} diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LineTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LineTests.java index 0f59940f973f0..b9f8cb37f5422 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LineTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LineTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -31,7 +33,7 @@ protected Line createTestInstance(boolean hasAlt) { } public void testBasicSerialization() 
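// testNoopValidator and testOneValidator above illustrate the design point of
// the new abstraction: the validation policy is pluggable. Since
// GeometryValidator has a single abstract method, a caller can opt out of
// geographic bounds checking with a lambda, e.g. for planar coordinates
// (hypothetical use case and class name):
import org.elasticsearch.geo.utils.GeometryValidator;
import org.elasticsearch.geo.utils.WellKnownText;

public class PlanarParsingSketch {
    public static void main(String[] args) throws Exception {
        GeometryValidator acceptAll = geometry -> { /* no bounds to enforce */ };
        WellKnownText planar = new WellKnownText(true, acceptAll);
        planar.fromWKT("POINT (10000 20000)"); // accepted: no validator rejects it
    }
}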
throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("linestring (3.0 1.0, 4.0 2.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4}))); assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), wkt.fromWKT("linestring (3 1, 4 2)")); @@ -45,19 +47,23 @@ public void testBasicSerialization() throws IOException, ParseException { } public void testInitValidation() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new Line(new double[]{1}, new double[]{3})); + GeometryValidator validator = new GeographyValidator(true); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Line(new double[]{1}, new double[]{3}))); assertEquals("at least two points in the line is required", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Line(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3})); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Line(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3}))); assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Line(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3})); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Line(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3}))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); } public void testWKTValidation() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new WellKnownText(randomBoolean(), false).fromWKT("linestring (3 1 6, 4 2 5)")); + () -> new WellKnownText(randomBoolean(), new GeographyValidator(false)).fromWKT("linestring (3 1 6, 4 2 5)")); assertEquals("found Z value [6.0] but [ignore_z_value] parameter is [false]", ex.getMessage()); } } diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LinearRingTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LinearRingTests.java index 9327e2046d5fe..07e9e866233e7 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LinearRingTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/LinearRingTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.test.ESTestCase; @@ -26,30 +28,35 @@ public class LinearRingTests extends ESTestCase { public void testBasicSerialization() { UnsupportedOperationException ex = expectThrows(UnsupportedOperationException.class, - () -> new WellKnownText(true, true).toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))); + () -> new WellKnownText(true, new GeographyValidator(true)) + .toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))); assertEquals("line ring cannot be serialized using WKT", ex.getMessage()); } public void testInitValidation() { + GeometryValidator validator = new GeographyValidator(true); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new LinearRing(new double[]{1, 2, 3}, new double[]{3, 4, 5})); + () -> validator.validate(new LinearRing(new double[]{1, 2, 3}, new double[]{3, 4, 5}))); 
assertEquals("first and last points of the linear ring must be the same (it must close itself): lats[0]=1.0 lats[2]=3.0 " + "lons[0]=3.0 lons[2]=5.0", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, - () -> new LinearRing(new double[]{1, 2, 1}, new double[]{3, 4, 3}, new double[]{1, 2, 3})); + () -> validator.validate(new LinearRing(new double[]{1, 2, 1}, new double[]{3, 4, 3}, new double[]{1, 2, 3}))); assertEquals("first and last points of the linear ring must be the same (it must close itself): lats[0]=1.0 lats[2]=1.0 " + "lons[0]=3.0 lons[2]=3.0 alts[0]=1.0 alts[2]=3.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new LinearRing(new double[]{1}, new double[]{3})); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new LinearRing(new double[]{1}, new double[]{3}))); assertEquals("at least two points in the line is required", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3})); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3}))); assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new LinearRing(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3})); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new LinearRing(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3}))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); } diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiLineTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiLineTests.java index 22e0c4459a3f7..9ed782e65cc06 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiLineTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiLineTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -40,7 +41,7 @@ protected MultiLine createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", wkt.toWKT( new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))))); assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))), diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPointTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPointTests.java index d3f8b5738cb9a..c170adf9c9411 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPointTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPointTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -41,7 +42,7 @@ protected MultiPoint createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + 
WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("multipoint (2.0 1.0)", wkt.toWKT( new MultiPoint(Collections.singletonList(new Point(1, 2))))); assertEquals(new MultiPoint(Collections.singletonList(new Point(1 ,2))), diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPolygonTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPolygonTests.java index fb4d8821ac3f4..9918dfa546c82 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPolygonTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/MultiPolygonTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -40,7 +41,7 @@ protected MultiPolygon createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))", wkt.toWKT(new MultiPolygon(Collections.singletonList( new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))))); diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PointTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PointTests.java index 4b590a3beb563..82e8fc40e75e9 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PointTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PointTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -31,7 +33,7 @@ protected Point createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("point (20.0 10.0)", wkt.toWKT(new Point(10, 20))); assertEquals(new Point(10, 20), wkt.fromWKT("point (20.0 10.0)")); @@ -43,16 +45,17 @@ public void testBasicSerialization() throws IOException, ParseException { } public void testInitValidation() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new Point(100, 10)); + GeometryValidator validator = new GeographyValidator(true); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(100, 10))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Point(10, 500)); + ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(10, 500))); assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage()); } public void testWKTValidation() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new WellKnownText(randomBoolean(), false).fromWKT("point (20.0 10.0 100.0)")); + () -> new WellKnownText(randomBoolean(), new GeographyValidator(false)).fromWKT("point (20.0 10.0 100.0)")); assertEquals("found Z value [100.0] but [ignore_z_value] parameter is [false]", ex.getMessage()); } } diff --git 
a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PolygonTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PolygonTests.java index 33a5325c87b8e..adbe1f38cdcc0 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PolygonTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/PolygonTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -32,7 +33,7 @@ protected Polygon createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("polygon ((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0))", wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))); assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})), @@ -73,16 +74,17 @@ public void testInitValidation() { public void testWKTValidation() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new WellKnownText(false, true).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3))")); + () -> new WellKnownText(false, new GeographyValidator(true)).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3))")); assertEquals("first and last points of the linear ring must be the same (it must close itself): " + "lats[0]=1.0 lats[2]=3.0 lons[0]=3.0 lons[2]=5.0 alts[0]=5.0 alts[2]=3.0", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, - () -> new WellKnownText(randomBoolean(), false).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))")); + () -> new WellKnownText(randomBoolean(), new GeographyValidator(false)).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))")); assertEquals("found Z value [5.0] but [ignore_z_value] parameter is [false]", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, - () -> new WellKnownText(false, randomBoolean()).fromWKT("polygon ((3 1, 4 2, 5 3, 3 1), (0.5 1.5, 2.5 1.5, 2.0 1.0))")); + () -> new WellKnownText(false, new GeographyValidator(randomBoolean())).fromWKT( + "polygon ((3 1, 4 2, 5 3, 3 1), (0.5 1.5, 2.5 1.5, 2.0 1.0))")); assertEquals("first and last points of the linear ring must be the same (it must close itself): " + "lats[0]=1.5 lats[2]=1.0 lons[0]=0.5 lons[2]=2.0", ex.getMessage()); } diff --git a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/RectangleTests.java b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/RectangleTests.java index afbf9f1ae8af6..8bd1494eb34a9 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geo/geometry/RectangleTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geo/geometry/RectangleTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.geo.geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -32,7 +34,7 @@ protected Rectangle createTestInstance(boolean hasAlt) { } public void testBasicSerialization() throws IOException, ParseException { - WellKnownText wkt = new WellKnownText(true, true); + WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", wkt.toWKT(new Rectangle(30, 40, 10, 20))); assertEquals(new Rectangle(30, 40, 10, 20), wkt.fromWKT("bbox (10.0, 20.0, 40.0, 
30.0)")); @@ -41,16 +43,21 @@ public void testBasicSerialization() throws IOException, ParseException { } public void testInitValidation() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new Rectangle(100, 1, 2, 3)); + GeometryValidator validator = new GeographyValidator(true); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Rectangle(1, 100, 2, 3))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Rectangle(1, 2, 200, 3)); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Rectangle(1, 2, 200, 3))); assertEquals("invalid longitude 200.0; must be between -180.0 and 180.0", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Rectangle(2, 1, 2, 3)); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Rectangle(2, 1, 2, 3))); assertEquals("max lat cannot be less than min lat", ex.getMessage()); - ex = expectThrows(IllegalArgumentException.class, () -> new Rectangle(1, 2, 2, 3, 5, Double.NaN)); + ex = expectThrows(IllegalArgumentException.class, + () -> validator.validate(new Rectangle(1, 2, 2, 3, 5, Double.NaN))); assertEquals("only one altitude value is specified", ex.getMessage()); } } diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java b/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java index 4fcb20d60b9df..45db221a65dbf 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java @@ -44,6 +44,7 @@ import org.elasticsearch.geo.geometry.Polygon; import org.elasticsearch.geo.geometry.Rectangle; import org.elasticsearch.geo.geometry.ShapeType; +import org.elasticsearch.geo.utils.GeometryValidator; import java.io.IOException; import java.util.ArrayList; @@ -66,18 +67,20 @@ public final class GeoJson { private final boolean rightOrientation; private final boolean coerce; - private final boolean ignoreZValue; + private final GeometryValidator validator; - public GeoJson(boolean rightOrientation, boolean coerce, boolean ignoreZValue) { + public GeoJson(boolean rightOrientation, boolean coerce, GeometryValidator validator) { this.rightOrientation = rightOrientation; this.coerce = coerce; - this.ignoreZValue = ignoreZValue; + this.validator = validator; } public Geometry fromXContent(XContentParser parser) throws IOException { try (XContentSubParser subParser = new XContentSubParser(parser)) { - return PARSER.apply(subParser, this); + Geometry geometry = PARSER.apply(subParser, this); + validator.validate(geometry); + return geometry; } } @@ -215,7 +218,7 @@ private XContentBuilder coordinatesToXContent(Polygon polygon) throws IOExceptio static { PARSER.declareString(constructorArg(), FIELD_TYPE); - PARSER.declareField(optionalConstructorArg(), (p, c) -> parseCoordinates(p, c.ignoreZValue), FIELD_COORDINATES, + PARSER.declareField(optionalConstructorArg(), (p, c) -> parseCoordinates(p), FIELD_COORDINATES, ObjectParser.ValueType.VALUE_ARRAY); PARSER.declareObjectArray(optionalConstructorArg(), PARSER, FIELD_GEOMETRIES); PARSER.declareString(optionalConstructorArg(), FIELD_ORIENTATION); @@ -298,20 +301,20 @@ private static void verifyNulls(String type, List geometries, Boolean * Recursive method which parses the arrays of coordinates used to define * Shapes */ - private static CoordinateNode 
parseCoordinates(XContentParser parser, boolean ignoreZValue) throws IOException { + private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException { XContentParser.Token token = parser.nextToken(); // Base cases if (token != XContentParser.Token.START_ARRAY && token != XContentParser.Token.END_ARRAY && token != XContentParser.Token.VALUE_NULL) { - return new CoordinateNode(parseCoordinate(parser, ignoreZValue)); + return new CoordinateNode(parseCoordinate(parser)); } else if (token == XContentParser.Token.VALUE_NULL) { throw new IllegalArgumentException("coordinates cannot contain NULL values)"); } List nodes = new ArrayList<>(); while (token != XContentParser.Token.END_ARRAY) { - CoordinateNode node = parseCoordinates(parser, ignoreZValue); + CoordinateNode node = parseCoordinates(parser); if (nodes.isEmpty() == false && nodes.get(0).numDimensions() != node.numDimensions()) { throw new ElasticsearchParseException("Exception parsing coordinates: number of dimensions do not match"); } @@ -325,7 +328,7 @@ private static CoordinateNode parseCoordinates(XContentParser parser, boolean ig /** * Parser a singe set of 2 or 3 coordinates */ - private static Point parseCoordinate(XContentParser parser, boolean ignoreZValue) throws IOException { + private static Point parseCoordinate(XContentParser parser) throws IOException { // Add support for coerce here if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { throw new ElasticsearchParseException("geo coordinates must be numbers"); @@ -339,7 +342,7 @@ private static Point parseCoordinate(XContentParser parser, boolean ignoreZValue // alt (for storing purposes only - future use includes 3d shapes) double alt = Double.NaN; if (token == XContentParser.Token.VALUE_NUMBER) { - alt = GeoPoint.assertZValue(ignoreZValue, parser.doubleValue()); + alt = parser.doubleValue(); parser.nextToken(); } // do not support > 3 dimensions diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java b/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java index fe06c3a9c33d2..e58372d825507 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java @@ -22,6 +22,8 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import java.io.IOException; @@ -34,10 +36,12 @@ public final class GeometryParser { private final GeoJson geoJsonParser; private final WellKnownText wellKnownTextParser; + private final GeometryValidator validator; public GeometryParser(boolean rightOrientation, boolean coerce, boolean ignoreZValue) { - geoJsonParser = new GeoJson(rightOrientation, coerce, ignoreZValue); - wellKnownTextParser = new WellKnownText(coerce, ignoreZValue); + validator = new GeographyValidator(ignoreZValue); + geoJsonParser = new GeoJson(rightOrientation, coerce, validator); + wellKnownTextParser = new WellKnownText(coerce, validator); } /** @@ -50,7 +54,6 @@ public Geometry parse(XContentParser parser) throws IOException, } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { return geoJsonParser.fromXContent(parser); } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - // TODO: Add support for 
ignoreZValue and coerce to WKT return wellKnownTextParser.fromWKT(parser.text()); } throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); diff --git a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index d19f7934c6567..9548d14cca9a1 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; import org.locationtech.jts.geom.Geometry; @@ -70,7 +71,7 @@ protected void assertGeometryEquals(Object expected, XContentBuilder geoJson, bo protected void assertGeometryEquals(org.elasticsearch.geo.geometry.Geometry expected, XContentBuilder geoJson) throws IOException { try (XContentParser parser = createParser(geoJson)) { parser.nextToken(); - assertEquals(expected, new GeoJson(true, false, false).fromXContent(parser)); + assertEquals(expected, new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index 7442d3c8d803d..dfc01e4c64e16 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.geo.geometry.Rectangle; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -67,22 +66,6 @@ public void testInvalidReadFrom() throws Exception { } } - public void testDistanceCheck() { - // Note, is within is an approximation, so, even though 0.52 is outside 50mi, we still get "true" - double radius = DistanceUnit.convert(50, DistanceUnit.MILES, DistanceUnit.METERS); - org.apache.lucene.geo.Rectangle r = org.apache.lucene.geo.Rectangle.fromPointDistance(0, 0, radius); - Rectangle box = new Rectangle(r.minLat, r.maxLat, r.minLon, r.maxLon); - assertThat(box.containsPoint(0.5, 0.5), equalTo(true)); - assertThat(box.containsPoint(0.52, 0.52), equalTo(true)); - assertThat(box.containsPoint(1, 1), equalTo(false)); - - radius = DistanceUnit.convert(200, DistanceUnit.MILES, DistanceUnit.METERS); - r = org.apache.lucene.geo.Rectangle.fromPointDistance(0, 179, radius); - box = new Rectangle(r.minLat, r.maxLat, r.minLon, r.maxLon); - assertThat(box.containsPoint(0, -179), equalTo(true)); - assertThat(box.containsPoint(0, -178), equalTo(false)); - } - private static double arcDistance(GeoPoint p1, GeoPoint p2) { return GeoDistance.ARC.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), DistanceUnit.METERS); } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonParserTests.java index e095c7e381a82..4146adb2d299a 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonParserTests.java +++ 
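// The GeometryParser change above routes both supported formats through one
// validator instance, so GeoJSON and WKT now enforce identical coordinate
// bounds. A sketch of the equivalent wiring (hypothetical class name):
import org.elasticsearch.common.geo.GeoJson;
import org.elasticsearch.geo.utils.GeographyValidator;
import org.elasticsearch.geo.utils.WellKnownText;

public class SharedValidatorSketch {
    public static void main(String[] args) {
        GeographyValidator validator = new GeographyValidator(false); // reject Z values
        GeoJson geoJson = new GeoJson(true, true, validator);
        WellKnownText wkt = new WellKnownText(true, validator);
        // a Z value in either format now surfaces as the validator's
        // IllegalArgumentException rather than a parser-specific error
        // (see the GeometryParserTests change later in this patch).
    }
}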
b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonParserTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.geo.geometry.Polygon; import org.elasticsearch.geo.geometry.Rectangle; +import org.elasticsearch.geo.utils.GeographyValidator; import java.io.IOException; import java.util.Arrays; @@ -72,7 +73,7 @@ public void testParseLineString() throws IOException { Line expected = new Line(new double[] {0.0, 1.0}, new double[] { 100.0, 101.0}); try (XContentParser parser = createParser(lineGeoJson)) { parser.nextToken(); - assertEquals(expected, new GeoJson(false, false, true).fromXContent(parser)); + assertEquals(expected, new GeoJson(false, false, new GeographyValidator(true)).fromXContent(parser)); } } @@ -124,7 +125,7 @@ public void testParseMultiDimensionShapes() throws IOException { try (XContentParser parser = createParser(pointGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -140,7 +141,7 @@ public void testParseMultiDimensionShapes() throws IOException { try (XContentParser parser = createParser(lineGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -178,7 +179,7 @@ public void testParseEnvelope() throws IOException { .endObject(); try (XContentParser parser = createParser(multilinesGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -189,7 +190,7 @@ public void testParseEnvelope() throws IOException { .endObject(); try (XContentParser parser = createParser(multilinesGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(false, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -239,7 +240,7 @@ public void testParse3DPolygon() throws IOException { )); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); - assertEquals(expected, new GeoJson(true, false, true).fromXContent(parser)); + assertEquals(expected, new GeoJson(true, false, new GeographyValidator(true)).fromXContent(parser)); } } @@ -259,7 +260,7 @@ public void testInvalidDimensionalPolygon() throws IOException { .endObject(); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, true).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(true)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -275,7 +276,7 @@ public void testParseInvalidPoint() throws IOException { .endObject(); try (XContentParser parser = createParser(invalidPoint1)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new 
GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -288,7 +289,7 @@ public void testParseInvalidPoint() throws IOException { .endObject(); try (XContentParser parser = createParser(invalidPoint2)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -302,7 +303,7 @@ public void testParseInvalidMultipoint() throws IOException { .endObject(); try (XContentParser parser = createParser(invalidMultipoint1)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -315,7 +316,7 @@ public void testParseInvalidMultipoint() throws IOException { .endObject(); try (XContentParser parser = createParser(invalidMultipoint2)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -329,7 +330,7 @@ public void testParseInvalidMultipoint() throws IOException { .endObject(); try (XContentParser parser = createParser(invalidMultipoint3)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -370,7 +371,7 @@ public void testParseInvalidDimensionalMultiPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -391,7 +392,7 @@ public void testParseInvalidPolygon() throws IOException { .endObject()); try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -406,7 +407,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -421,7 +422,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -436,7 +437,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -449,7 +450,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -460,7 +461,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -473,7 +474,7 @@ public void testParseInvalidPolygon() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -710,7 +711,7 @@ public void testParseInvalidShapes() throws IOException { try (XContentParser parser = createParser(tooLittlePointGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } @@ -723,7 +724,7 @@ public void testParseInvalidShapes() throws IOException { try (XContentParser parser = createParser(emptyPointGeoJson)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertNull(parser.nextToken()); } } @@ -749,7 +750,7 @@ public void testParseInvalidGeometryCollectionShapes() throws IOException { parser.nextToken(); // foo parser.nextToken(); // start object parser.nextToken(); // start object - expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser)); + expectThrows(XContentParseException.class, () -> new GeoJson(true, false, new GeographyValidator(false)).fromXContent(parser)); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // end of the document 
assertNull(parser.nextToken()); // no more elements afterwards } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonSerializationTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonSerializationTests.java index b0ee969119c13..46766b4e11f4b 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonSerializationTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.geo.geometry.Polygon; import org.elasticsearch.geo.geometry.Rectangle; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.test.ESTestCase; @@ -49,7 +50,7 @@ public class GeoJsonSerializationTests extends ESTestCase { private static class GeometryWrapper implements ToXContentObject { private Geometry geometry; - private static GeoJson PARSER = new GeoJson(true, false, true); + private static GeoJson PARSER = new GeoJson(true, false, new GeographyValidator(true)); GeometryWrapper(Geometry geometry) { this.geometry = geometry; diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryParserTests.java index 0d2b182741fc8..13b3f8f67b321 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeometryParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryParserTests.java @@ -61,7 +61,7 @@ public void testGeoJsonParsing() throws Exception { try (XContentParser parser = createParser(pointGeoJsonWithZ)) { parser.nextToken(); - expectThrows(XContentParseException.class, () -> new GeometryParser(true, randomBoolean(), false).parse(parser)); + expectThrows(IllegalArgumentException.class, () -> new GeometryParser(true, randomBoolean(), false).parse(parser)); } XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java index 5fd8f70e3691e..e297695152435 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java @@ -9,6 +9,7 @@ import org.elasticsearch.geo.geometry.Geometry; import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.geo.geometry.ShapeType; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.search.SearchHit; @@ -126,7 +127,7 @@ public boolean supportsFromSource() { } private static class GeoShapeField extends FromSource { - private static final WellKnownText wkt = new WellKnownText(true, true); + private static final WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true)); GeoShapeField(String alias, String name) { super(alias, name); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index bb66a10418936..0a1c0826695bd 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ 
-5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.jdbc; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -54,7 +55,7 @@ */ final class TypeConverter { - private static WellKnownText WKT = new WellKnownText(true, true); + private static WellKnownText WKT = new WellKnownText(true, new GeographyValidator(true)); private TypeConverter() {} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index 47a14e180fd18..256d7cb612cf0 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.geo.geometry.Geometry; import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.utils.GeographyValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.jdbc.EsType; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -51,7 +52,7 @@ public class JdbcAssert { private static final IntObjectHashMap SQL_TO_TYPE = new IntObjectHashMap<>(); - private static final WellKnownText WKT = new WellKnownText(true, true); + private static final WellKnownText WKT = new WellKnownText(true, new GeographyValidator(true)); static { for (EsType type : EsType.values()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java index f9a025ea4f09a..1c3d1e7c9358b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java @@ -29,6 +29,8 @@ import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.geo.geometry.Polygon; import org.elasticsearch.geo.geometry.Rectangle; +import org.elasticsearch.geo.utils.GeographyValidator; +import org.elasticsearch.geo.utils.GeometryValidator; import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -49,9 +51,11 @@ public class GeoShape implements ToXContentFragment, NamedWriteable { private final Geometry shape; + private static final GeometryValidator validator = new GeographyValidator(true); + private static final GeometryParser GEOMETRY_PARSER = new GeometryParser(true, true, true); - private static final WellKnownText WKT_PARSER = new WellKnownText(true, true); + private static final WellKnownText WKT_PARSER = new WellKnownText(true, validator); public GeoShape(double lon, double lat) { shape = new Point(lat, lon); From c75773745cd048cd81a58c7d8a74272b45a25cc6 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 27 Jun 2019 13:58:42 -0700 Subject: [PATCH 047/140] [DOCS] Updates ML APIs to use new API template (#43711) --- docs/reference/ml/apis/close-job.asciidoc | 19 +++--- .../ml/apis/delete-calendar-event.asciidoc | 17 +++--- .../ml/apis/delete-calendar-job.asciidoc | 21 +++---- .../ml/apis/delete-calendar.asciidoc | 15 ++--- .../ml/apis/delete-datafeed.asciidoc | 26 ++++---- .../ml/apis/delete-expired-data.asciidoc | 15 +++-- docs/reference/ml/apis/delete-filter.asciidoc | 15 
++--- .../ml/apis/delete-forecast.asciidoc | 34 ++++++----- docs/reference/ml/apis/delete-job.asciidoc | 22 +++---- .../ml/apis/delete-snapshot.asciidoc | 21 +++---- .../ml/apis/find-file-structure.asciidoc | 42 ++++++------- docs/reference/ml/apis/flush-job.asciidoc | 27 +++++---- docs/reference/ml/apis/forecast.asciidoc | 21 +++---- docs/reference/ml/apis/get-bucket.asciidoc | 43 ++++++------- .../ml/apis/get-calendar-event.asciidoc | 24 ++++---- docs/reference/ml/apis/get-calendar.asciidoc | 20 +++---- docs/reference/ml/apis/get-category.asciidoc | 30 +++++----- .../ml/apis/get-datafeed-stats.asciidoc | 18 +++--- docs/reference/ml/apis/get-datafeed.asciidoc | 18 +++--- docs/reference/ml/apis/get-filter.asciidoc | 20 +++---- .../reference/ml/apis/get-influencer.asciidoc | 36 +++++------ docs/reference/ml/apis/get-job-stats.asciidoc | 18 +++--- docs/reference/ml/apis/get-job.asciidoc | 16 ++--- docs/reference/ml/apis/get-ml-info.asciidoc | 18 +++--- .../ml/apis/get-overall-buckets.asciidoc | 39 ++++++------ docs/reference/ml/apis/get-record.asciidoc | 36 +++++------ docs/reference/ml/apis/get-snapshot.asciidoc | 30 +++++----- docs/reference/ml/apis/open-job.asciidoc | 25 ++++---- .../ml/apis/post-calendar-event.asciidoc | 24 ++++---- docs/reference/ml/apis/post-data.asciidoc | 33 +++++----- .../ml/apis/preview-datafeed.asciidoc | 38 ++++++------ .../ml/apis/put-calendar-job.asciidoc | 22 +++---- docs/reference/ml/apis/put-calendar.asciidoc | 18 +++--- docs/reference/ml/apis/put-datafeed.asciidoc | 60 +++++++++---------- docs/reference/ml/apis/put-filter.asciidoc | 20 +++---- docs/reference/ml/apis/put-job.asciidoc | 40 ++++++------- .../ml/apis/revert-snapshot.asciidoc | 24 ++++---- .../ml/apis/set-upgrade-mode.asciidoc | 18 +++--- .../reference/ml/apis/start-datafeed.asciidoc | 35 +++++------ docs/reference/ml/apis/stop-datafeed.asciidoc | 26 ++++---- .../ml/apis/update-datafeed.asciidoc | 51 ++++++++-------- docs/reference/ml/apis/update-filter.asciidoc | 27 +++++---- docs/reference/ml/apis/update-job.asciidoc | 19 +++--- .../ml/apis/update-snapshot.asciidoc | 29 ++++----- .../ml/apis/validate-detector.asciidoc | 14 ++--- docs/reference/ml/apis/validate-job.asciidoc | 14 ++--- 46 files changed, 603 insertions(+), 595 deletions(-) diff --git a/docs/reference/ml/apis/close-job.asciidoc b/docs/reference/ml/apis/close-job.asciidoc index fa96b18777d42..8eb78cff0064e 100644 --- a/docs/reference/ml/apis/close-job.asciidoc +++ b/docs/reference/ml/apis/close-job.asciidoc @@ -22,6 +22,13 @@ operations, but you can still explore and navigate results. `POST _ml/anomaly_detectors/_all/_close` + +[[ml-close-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-close-job-desc]] ==== {api-description-title} @@ -52,27 +59,21 @@ results the job might have recently produced or might produce in the future. [[ml-close-job-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job. It can be a job identifier, a group name, or a wildcard expression. [[ml-close-job-query-parms]] ==== {api-query-parms-title} -`force`:: +`force` (Optional):: (boolean) Use to close a failed job, or to forcefully close a job which has not responded to its initial close request. -`timeout`:: +`timeout` (Optional):: (time units) Controls the time to wait until a job has closed. The default value is 30 minutes. 
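
For illustration, a close request that combines the query parameters documented above might look like the following sketch (the `total-requests` job ID is hypothetical):

[source,js]
----
POST _ml/anomaly_detectors/total-requests/_close?timeout=10m
----
// CONSOLE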
-[[ml-close-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security privileges]. - [[ml-close-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-calendar-event.asciidoc b/docs/reference/ml/apis/delete-calendar-event.asciidoc index bc99398991bd6..0aa9ce5cc8d92 100644 --- a/docs/reference/ml/apis/delete-calendar-event.asciidoc +++ b/docs/reference/ml/apis/delete-calendar-event.asciidoc @@ -13,6 +13,13 @@ Deletes scheduled events from a calendar. `DELETE _ml/calendars//events/` +[[ml-delete-calendar-event-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-calendar-event-desc]] ==== {api-description-title} @@ -23,19 +30,13 @@ events and delete the calendar, see the [[ml-delete-calendar-event-path-parms]] ==== {api-path-parms-title} -`calendar_id`(required):: +`` (Required):: (string) Identifier for the calendar. -`event_id` (required):: +`` (Required):: (string) Identifier for the scheduled event. You can obtain this identifier by using the <>. -[[ml-delete-calendar-event-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security privileges]. - [[ml-delete-calendar-event-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-calendar-job.asciidoc b/docs/reference/ml/apis/delete-calendar-job.asciidoc index 9451734c2304f..a555b3d3b922b 100644 --- a/docs/reference/ml/apis/delete-calendar-job.asciidoc +++ b/docs/reference/ml/apis/delete-calendar-job.asciidoc @@ -13,21 +13,22 @@ Deletes jobs from a calendar. `DELETE _ml/calendars//jobs/` +[[ml-delete-calendar-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-calendar-job-path-parms]] ==== {api-path-parms-title} -`calendar_id`(required):: +`` (Required):: (string) Identifier for the calendar. -`job_id` (required):: - (string) An identifier for the job. It can be a job identifier, a group name, or a - comma-separated list of jobs or groups. - -[[ml-delete-calendar-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security privileges]. +`` (Required):: + (string) An identifier for the job. It can be a job identifier, a group name, + or a comma-separated list of jobs or groups. [[ml-delete-calendar-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-calendar.asciidoc b/docs/reference/ml/apis/delete-calendar.asciidoc index c07eb37c93dfc..065c117c49c63 100644 --- a/docs/reference/ml/apis/delete-calendar.asciidoc +++ b/docs/reference/ml/apis/delete-calendar.asciidoc @@ -13,6 +13,13 @@ Deletes a calendar. `DELETE _ml/calendars/` +[[ml-delete-calendar-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. 
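
As a concrete sketch of the delete calendar job API documented above (both the `planned-outages` calendar ID and the `total-requests` job ID are hypothetical):

[source,js]
----
DELETE _ml/calendars/planned-outages/jobs/total-requests
----
// CONSOLE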
+ [[ml-delete-calendar-desc]] ==== {api-description-title} @@ -22,15 +29,9 @@ calendar. [[ml-delete-calendar-path-parms]] ==== {api-path-parms-title} -`calendar_id` (required):: +`` (Required):: (string) Identifier for the calendar. -[[ml-delete-calendar-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security privileges]. - [[ml-delete-calendar-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-datafeed.asciidoc b/docs/reference/ml/apis/delete-datafeed.asciidoc index 9686959427daa..23917bf9e3365 100644 --- a/docs/reference/ml/apis/delete-datafeed.asciidoc +++ b/docs/reference/ml/apis/delete-datafeed.asciidoc @@ -15,29 +15,31 @@ Deletes an existing {dfeed}. `DELETE _ml/datafeeds/` +[[ml-delete-datafeed-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-datafeed-desc]] ==== {api-description-title} -NOTE: Unless the `force` parameter is used, the {dfeed} must be stopped before it can be deleted. +NOTE: Unless you use the `force` parameter, you must stop the {dfeed} before you +can delete it. [[ml-delete-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id` (required):: - (string) Identifier for the {dfeed} +`` (Required):: + (string) Identifier for the {dfeed}. [[ml-delete-datafeed-query-parms]] ==== {api-query-parms-title} -`force`:: - (boolean) Use to forcefully delete a started {dfeed}; this method is quicker than - stopping and deleting the {dfeed}. - -[[ml-delete-datafeed-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security privileges]. +`force` (Optional):: + (boolean) Use to forcefully delete a started {dfeed}; this method is quicker + than stopping and deleting the {dfeed}. [[ml-delete-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-expired-data.asciidoc b/docs/reference/ml/apis/delete-expired-data.asciidoc index 56ca1871329ee..ada9ec1c8c34e 100644 --- a/docs/reference/ml/apis/delete-expired-data.asciidoc +++ b/docs/reference/ml/apis/delete-expired-data.asciidoc @@ -13,6 +13,13 @@ Deletes expired and unused machine learning data. `DELETE _ml/_delete_expired_data` +[[ml-delete-expired-data-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-expired-data-desc]] ==== {api-description-title} @@ -20,14 +27,6 @@ Deletes all job results, model snapshots and forecast data that have exceeded their `retention days` period. Machine learning state documents that are not associated with any job are also deleted. -[[ml-delete-expired-data-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. 
- [[ml-delete-expired-data-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-filter.asciidoc b/docs/reference/ml/apis/delete-filter.asciidoc index 8d6797448ec96..1962db29ad74e 100644 --- a/docs/reference/ml/apis/delete-filter.asciidoc +++ b/docs/reference/ml/apis/delete-filter.asciidoc @@ -13,6 +13,13 @@ Deletes a filter. `DELETE _ml/filters/` +[[ml-delete-filter-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-filter-desc]] ==== {api-description-title} @@ -23,15 +30,9 @@ update or delete the job before you can delete the filter. [[ml-delete-filter-path-parms]] ==== {api-path-parms-title} -`filter_id` (required):: +`` (Required):: (string) Identifier for the filter. -[[ml-delete-filter-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - [[ml-delete-filter-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-forecast.asciidoc b/docs/reference/ml/apis/delete-forecast.asciidoc index 8332d07f84041..aac054217fced 100644 --- a/docs/reference/ml/apis/delete-forecast.asciidoc +++ b/docs/reference/ml/apis/delete-forecast.asciidoc @@ -17,47 +17,51 @@ Deletes forecasts from a {ml} job. `DELETE _ml/anomaly_detectors//_forecast/_all` +[[ml-delete-forecast-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-forecast-desc]] ==== {api-description-title} By default, forecasts are retained for 14 days. You can specify a different -retention period with the `expires_in` parameter in the <>. The delete forecast API enables you to delete one or more forecasts before they expire. +retention period with the `expires_in` parameter in the +<>. The delete forecast API enables you to delete +one or more forecasts before they expire. -NOTE: When you delete a job its associated forecasts are deleted. +NOTE: When you delete a job, its associated forecasts are deleted. -For more information, see {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the Future]. +For more information, see +{stack-ov}/ml-overview.html#ml-forecasting[Forecasting the future]. [[ml-delete-forecast-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: +`` (Required):: (string) Identifier for the job. -`forecast_id`:: +`forecast_id` (Optional):: (string) A comma-separated list of forecast identifiers. If you do not specify this optional parameter or if you specify `_all`, the API deletes all forecasts from the job. -[[ml-delete-forecast-request-body]] -==== {api-request-body-title} +[[ml-delete-forecast-query-parms]] +==== {api-query-parms-title} -`allow_no_forecasts`:: +`allow_no_forecasts` (Optional):: (boolean) Specifies whether an error occurs when there are no forecasts. In particular, if this parameter is set to `false` and there are no forecasts associated with the job, attempts to delete all forecasts return an error. The default value is `true`. -`timeout`:: +`timeout` (Optional):: (time units) Specifies the period of time to wait for the completion of the delete operation. When this period of time elapses, the API fails and returns an error. 
The default value is `30s`. For more information about time units, see <>. - -[[ml-delete-forecast-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security Privileges]. [[ml-delete-forecast-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc index 94042ba356559..efd172ef5fb60 100644 --- a/docs/reference/ml/apis/delete-job.asciidoc +++ b/docs/reference/ml/apis/delete-job.asciidoc @@ -13,6 +13,13 @@ Deletes an existing anomaly detection job. `DELETE _ml/anomaly_detectors/` +[[ml-delete-job-prereqs]] +==== {api-prereq-title} + +* If {es} {security-features} are enabled, you must have `manage_ml` or `manage` +cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-job-desc]] ==== {api-description-title} @@ -33,27 +40,20 @@ separated list. [[ml-delete-job-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job +`` (Required):: + (string) Identifier for the job. [[ml-delete-job-query-parms]] ==== {api-query-parms-title} -`force`:: +`force` (Optional):: (boolean) Use to forcefully delete an opened job; this method is quicker than closing and deleting the job. -`wait_for_completion`:: +`wait_for_completion` (Optional):: (boolean) Specifies whether the request should return immediately or wait until the job deletion completes. Defaults to `true`. -[[ml-delete-job-prereqs]] -==== {api-prereq-title} - -If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` -cluster privileges to use this API. -For more information, see {stack-ov}/security-privileges.html[Security Privileges]. - [[ml-delete-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/delete-snapshot.asciidoc b/docs/reference/ml/apis/delete-snapshot.asciidoc index 461f7fb422756..0e696f2a01139 100644 --- a/docs/reference/ml/apis/delete-snapshot.asciidoc +++ b/docs/reference/ml/apis/delete-snapshot.asciidoc @@ -13,6 +13,13 @@ Deletes an existing model snapshot. `DELETE _ml/anomaly_detectors//model_snapshots/` +[[ml-delete-snapshot-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-delete-snapshot-desc]] ==== {api-description-title} @@ -23,17 +30,11 @@ the `model_snapshot_id` in the results from the get jobs API. [[ml-delete-snapshot-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job - -`snapshot_id` (required):: - (string) Identifier for the model snapshot - -[[ml-delete-snapshot-prereqs]] -==== {api-prereq-title} +`` (Required):: + (string) Identifier for the job. -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. +`` (Required):: + (string) Identifier for the model snapshot. [[ml-delete-snapshot-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/find-file-structure.asciidoc b/docs/reference/ml/apis/find-file-structure.asciidoc index ead3087f3d86c..212e80c7e1bd2 100644 --- a/docs/reference/ml/apis/find-file-structure.asciidoc +++ b/docs/reference/ml/apis/find-file-structure.asciidoc @@ -16,6 +16,13 @@ suitable to be ingested into {es}. 
`POST _ml/find_file_structure`

+[[ml-find-file-structure-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `monitor_ml` or
+`monitor` cluster privileges to use this API. See
+{stack-ov}/security-privileges.html[Security privileges].
+
 [[ml-find-file-structure-desc]]
 ==== {api-description-title}

@@ -51,36 +58,36 @@ chosen.
 [[ml-find-file-structure-query-parms]]
 ==== {api-query-parms-title}

-`charset`::
+`charset` (Optional)::
   (string) The file's character set. It must be a character set that is
   supported by the JVM that {es} uses. For example, `UTF-8`, `UTF-16LE`,
   `windows-1252`, or `EUC-JP`. If this parameter is not specified, the structure
   finder chooses an appropriate character set.

-`column_names`::
+`column_names` (Optional)::
   (string) If you have set `format` to `delimited`, you can specify the column names
   in a comma-separated list. If this parameter is not specified, the structure
   finder uses the column names from the header row of the file. If the file does
   not have a header row, columns are named "column1", "column2", "column3", etc.

-`delimiter`::
+`delimiter` (Optional)::
   (string) If you have set `format` to `delimited`, you can specify the character
   used to delimit the values in each row. Only a single character is supported;
   the delimiter cannot have multiple characters. If this parameter is not
   specified, the structure finder considers the following possibilities: comma,
   tab, semi-colon, and pipe (`|`).

-`explain`::
+`explain` (Optional)::
   (boolean) If this parameter is set to `true`, the response includes a field
   named `explanation`, which is an array of strings that indicate how the
   structure finder produced its result. The default value is `false`.

-`format`::
+`format` (Optional)::
   (string) The high level structure of the file. Valid values are `ndjson`, `xml`,
   `delimited`, and `semi_structured_text`. If this parameter is not specified,
   the structure finder chooses one.

-`grok_pattern`::
+`grok_pattern` (Optional)::
   (string) If you have set `format` to `semi_structured_text`, you can specify a
   Grok pattern that is used to extract fields from every message in the file.
   The name of the timestamp field in the Grok pattern must match what is specified
@@ -88,20 +95,20 @@ chosen.
   name of the timestamp field in the Grok pattern must match "timestamp". If
   `grok_pattern` is not specified, the structure finder creates a Grok pattern.

-`has_header_row`::
+`has_header_row` (Optional)::
   (boolean) If you have set `format` to `delimited`, you can use this parameter to
   indicate whether the column names are in the first row of the file. If this
   parameter is not specified, the structure finder guesses based on the similarity of
   the first row of the file to other rows.

-`line_merge_size_limit`::
+`line_merge_size_limit` (Optional)::
   (unsigned integer) The maximum number of characters in a message when lines are
   merged to form messages while analyzing semi-structured files. The default
   is 10000. If you have extremely long messages you may need to increase this, but
   be aware that this may lead to very long processing times if the way to group
   lines into messages is misdetected.

-`lines_to_sample`::
+`lines_to_sample` (Optional)::
   (unsigned integer) The number of lines to include in the structural analysis,
   starting from the beginning of the file. The minimum is 2; the default
   is 1000.
If the value of this parameter is greater than the number of lines in
@@ -117,7 +124,7 @@ efficient to upload a sample file with more variety in the first 1000 lines
 than to request analysis of 100000 lines to achieve some variety.
--

-`quote`::
+`quote` (Optional)::
   (string) If you have set `format` to `delimited`, you can specify the character
   used to quote the values in each row if they contain newlines or the delimiter
   character. Only a single character is supported. If this parameter is not
@@ -125,18 +132,18 @@ to request analysis of 100000 lines to achieve some variety.
   format does not use quoting, a workaround is to set this argument to a
   character that does not appear anywhere in the sample.

-`should_trim_fields`::
+`should_trim_fields` (Optional)::
   (boolean) If you have set `format` to `delimited`, you can specify whether
   values between delimiters should have whitespace trimmed from them. If this
   parameter is not specified and the delimiter is pipe (`|`), the default value
   is `true`. Otherwise, the default value is `false`.

-`timeout`::
+`timeout` (Optional)::
   (time) Sets the maximum amount of time that the structure analysis may take.
   If the analysis is still running when the timeout expires then it will be
   aborted. The default value is 25 seconds.

-`timestamp_field`::
+`timestamp_field` (Optional)::
   (string) The name of the field that contains the primary timestamp of each
   record in the file. In particular, if the file were ingested into an index,
   this is the field that would be used to populate the `@timestamp` field. +
@@ -155,7 +162,7 @@ field (if any) is the primary timestamp field.
 For structured file formats, it is not compulsory to have a timestamp in the file.
--

-`timestamp_format`::
+`timestamp_format` (Optional)::
   (string) The Java time format of the timestamp field in the file. +
+
--
@@ -207,13 +214,6 @@ be ingested into {es}. It does not need to be in JSON format and it
 does not need to be UTF-8 encoded. The size is limited to the {es} HTTP receive
 buffer size, which defaults to 100 Mb.

-[[ml-find-file-structure-prereqs]]
-==== {api-prereq-title}
-
-You must have `monitor_ml`, or `monitor` cluster privileges to use this API.
-For more information, see {stack-ov}/security-privileges.html[Security Privileges].
-
-
 [[ml-find-file-structure-examples]]
 ==== {api-examples-title}

diff --git a/docs/reference/ml/apis/flush-job.asciidoc b/docs/reference/ml/apis/flush-job.asciidoc
index 6598f8155b931..590f866ca1799 100644
--- a/docs/reference/ml/apis/flush-job.asciidoc
+++ b/docs/reference/ml/apis/flush-job.asciidoc
@@ -13,6 +13,13 @@ Forces any buffered data to be processed by the job.

 `POST _ml/anomaly_detectors/<job_id>/_flush`

+[[ml-flush-job-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `manage_ml` or
+`manage` cluster privileges to use this API. See
+{stack-ov}/security-privileges.html[Security privileges].
+
 [[ml-flush-job-desc]]
 ==== {api-description-title}

@@ -29,39 +36,33 @@ opened again before analyzing further data.
 [[ml-flush-job-path-parms]]
 ==== {api-path-parms-title}

-`job_id` (required)::
-(string) Identifier for the job
+`<job_id>` (Required)::
+(string) Identifier for the job.

 [[ml-flush-job-query-parms]]
 ==== {api-query-parms-title}

-`advance_time`::
+`advance_time` (Optional)::
   (string) Specifies to advance to a particular time value. Results are
   generated and the model is updated for data from the specified time interval.
-`calc_interim`:: +`calc_interim` (Optional):: (boolean) If true, calculates the interim results for the most recent bucket or all buckets within the latency period. -`end`:: +`end` (Optional):: (string) When used in conjunction with `calc_interim`, specifies the range of buckets on which to calculate interim results. -`skip_time`:: +`skip_time` (Optional):: (string) Specifies to skip to a particular time value. Results are not generated and the model is not updated for data from the specified time interval. -`start`:: +`start` (Optional):: (string) When used in conjunction with `calc_interim`, specifies the range of buckets on which to calculate interim results. -[[ml-flush-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - [[ml-flush-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/forecast.asciidoc b/docs/reference/ml/apis/forecast.asciidoc index 05bd250975dfb..d137b2e1be3ce 100644 --- a/docs/reference/ml/apis/forecast.asciidoc +++ b/docs/reference/ml/apis/forecast.asciidoc @@ -13,10 +13,17 @@ Predicts the future behavior of a time series by using its historical behavior. `POST _ml/anomaly_detectors//_forecast` +[[ml-forecast-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-forecast-desc]] ==== {api-description-title} -See {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the Future]. +See {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the future]. [NOTE] =============================== @@ -29,30 +36,24 @@ forecast. For more information about this property, see <>. [[ml-forecast-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job. [[ml-forecast-request-body]] ==== {api-request-body-title} -`duration`:: +`duration` (Optional):: (time units) A period of time that indicates how far into the future to forecast. For example, `30d` corresponds to 30 days. The default value is 1 day. The forecast starts at the last record that was processed. For more information about time units, see <>. -`expires_in`:: +`expires_in` (Optional):: (time units) The period of time that forecast results are retained. After a forecast expires, the results are deleted. The default value is 14 days. If set to a value of `0`, the forecast is never automatically deleted. For more information about time units, see <>. -[[ml-forecast-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. - [[ml-forecast-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-bucket.asciidoc b/docs/reference/ml/apis/get-bucket.asciidoc index 0e2b7988e8ead..2a73d0f5d3538 100644 --- a/docs/reference/ml/apis/get-bucket.asciidoc +++ b/docs/reference/ml/apis/get-bucket.asciidoc @@ -15,6 +15,17 @@ Retrieves job results for one or more buckets. `GET _ml/anomaly_detectors//results/buckets/` +[[ml-get-bucket-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also +need `read` index privilege on the index that stores the results. 
The +`machine_learning_admin` and `machine_learning_user` roles provide these +privileges. For more information, see +{stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + [[ml-get-bucket-desc]] ==== {api-description-title} @@ -24,44 +35,44 @@ bucket. [[ml-get-bucket-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job -`timestamp`:: +`` (Optional):: (string) The timestamp of a single bucket result. - If you do not specify this optional parameter, the API returns information + If you do not specify this parameter, the API returns information about all buckets. [[ml-get-bucket-request-body]] ==== {api-request-body-title} -`anomaly_score`:: +`anomaly_score` (Optional):: (double) Returns buckets with anomaly scores greater or equal than this value. -`desc`:: +`desc` (Optional):: (boolean) If true, the buckets are sorted in descending order. -`end`:: +`end` (Optional):: (string) Returns buckets with timestamps earlier than this time. -`exclude_interim`:: +`exclude_interim` (Optional):: (boolean) If true, the output excludes interim results. By default, interim results are included. -`expand`:: +`expand` (Optional):: (boolean) If true, the output includes anomaly records. -`page`:: +`page` (Optional):: `from`::: (integer) Skips the specified number of buckets. `size`::: (integer) Specifies the maximum number of buckets to obtain. -`sort`:: +`sort` (Optional):: (string) Specifies the sort field for the requested buckets. By default, the buckets are sorted by the `timestamp` field. -`start`:: +`start` (Optional):: (string) Returns buckets with timestamps after this time. [[ml-get-bucket-results]] @@ -73,16 +84,6 @@ The API returns the following information: (array) An array of bucket objects. For more information, see <>. -[[ml-get-bucket-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. You also need `read` index privilege on the index -that stores the results. The `machine_learning_admin` and `machine_learning_user` -roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. - [[ml-get-bucket-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-calendar-event.asciidoc b/docs/reference/ml/apis/get-calendar-event.asciidoc index 1ee94eff7b5c6..173a249488684 100644 --- a/docs/reference/ml/apis/get-calendar-event.asciidoc +++ b/docs/reference/ml/apis/get-calendar-event.asciidoc @@ -16,6 +16,13 @@ calendars. `GET _ml/calendars/_all/events` +[[ml-get-calendar-event-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-get-calendar-event-desc]] ==== {api-description-title} @@ -25,22 +32,22 @@ calendars by using `_all`. [[ml-get-calendar-event-path-parms]] ==== {api-path-parms-title} -`calendar_id` (required):: +`` (Required):: (string) Identifier for the calendar. [[ml-get-calendar-event-request-body]] ==== {api-request-body-title} -`end`:: +`end` (Optional):: (string) Specifies to get events with timestamps earlier than this time. -`from`:: +`from` (Optional):: (integer) Skips the specified number of events. 
-`size`:: +`size` (Optional):: (integer) Specifies the maximum number of events to obtain. -`start`:: +`start` (Optional):: (string) Specifies to get events with timestamps after this time. [[ml-get-calendar-event-results]] @@ -52,13 +59,6 @@ The API returns the following information: (array) An array of scheduled event resources. For more information, see <>. -[[ml-get-calendar-event-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-calendar-event-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-calendar.asciidoc b/docs/reference/ml/apis/get-calendar.asciidoc index 1ff9f8442c28e..3d55f825bdb86 100644 --- a/docs/reference/ml/apis/get-calendar.asciidoc +++ b/docs/reference/ml/apis/get-calendar.asciidoc @@ -15,6 +15,13 @@ Retrieves configuration information for calendars. `GET _ml/calendars/_all` +[[ml-get-calendar-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-get-calendar-desc]] ==== {api-description-title} @@ -24,17 +31,17 @@ You can get information for a single calendar or for all calendars by using [[ml-get-calendar-path-parms]] ==== {api-path-parms-title} -`calendar_id`:: +`` (Required):: (string) Identifier for the calendar. [[ml-get-calendar-request-body]] ==== {api-request-body-title} -`page`:: +`page` (Optional):: `from`::: (integer) Skips the specified number of calendars. -`size`::: +`size` (Optional)::: (integer) Specifies the maximum number of calendars to obtain. [[ml-get-calendar-results]] @@ -46,13 +53,6 @@ The API returns the following information: (array) An array of calendar resources. For more information, see <>. -[[ml-get-calendar-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-calendar-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-category.asciidoc b/docs/reference/ml/apis/get-category.asciidoc index 252f59c3ef205..6301eaf13a538 100644 --- a/docs/reference/ml/apis/get-category.asciidoc +++ b/docs/reference/ml/apis/get-category.asciidoc @@ -15,26 +15,36 @@ Retrieves job results for one or more categories. `GET _ml/anomaly_detectors//results/categories/` +[[ml-get-category-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also +need `read` index privilege on the index that stores the results. The +`machine_learning_admin` and `machine_learning_user` roles provide these +privileges. See {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + [[ml-get-category-desc]] ==== {api-description-title} For more information about categories, see -{stack-ov}/ml-configuring-categories.html[Categorizing Log Messages]. +{stack-ov}/ml-configuring-categories.html[Categorizing log messages]. [[ml-get-category-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job. -`category_id`:: - (long) Identifier for the category. 
If you do not specify this optional parameter, +`` (Optional):: + (long) Identifier for the category. If you do not specify this parameter, the API returns information about all categories in the job. [[ml-get-category-request-body]] ==== {api-request-body-title} -`page`:: +`page` (Optional):: `from`::: (integer) Skips the specified number of categories. `size`::: @@ -49,16 +59,6 @@ The API returns the following information: (array) An array of category objects. For more information, see <>. -[[ml-get-category-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. You also need `read` index privilege on the index -that stores the results. The `machine_learning_admin` and `machine_learning_user` -roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. - [[ml-get-category-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-datafeed-stats.asciidoc b/docs/reference/ml/apis/get-datafeed-stats.asciidoc index 1789478e081e9..6ce99785912af 100644 --- a/docs/reference/ml/apis/get-datafeed-stats.asciidoc +++ b/docs/reference/ml/apis/get-datafeed-stats.asciidoc @@ -19,7 +19,14 @@ Retrieves usage information for {dfeeds}. `GET _ml/datafeeds/_stats` + -`GET _ml/datafeeds/_all/_stats` + +`GET _ml/datafeeds/_all/_stats` + +[[ml-get-datafeed-stats-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. [[ml-get-datafeed-stats-desc]] ==== {api-description-title} @@ -37,7 +44,7 @@ IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. [[ml-get-datafeed-stats-path-parms]] ==== {api-path-parms-title} -`feed_id`:: +`` (Optional):: (string) Identifier for the {dfeed}. It can be a {dfeed} identifier or a wildcard expression. If you do not specify one of these options, the API returns statistics for all {dfeeds}. @@ -51,13 +58,6 @@ The API returns the following information: (array) An array of {dfeed} count objects. For more information, see <>. -[[ml-get-datafeed-stats-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-datafeed-stats-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc index 8cb0881827747..abc79ae5c7d71 100644 --- a/docs/reference/ml/apis/get-datafeed.asciidoc +++ b/docs/reference/ml/apis/get-datafeed.asciidoc @@ -19,7 +19,14 @@ Retrieves configuration information for {dfeeds}. `GET _ml/datafeeds/` + -`GET _ml/datafeeds/_all` + +`GET _ml/datafeeds/_all` + +[[ml-get-datafeed-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. [[ml-get-datafeed-desc]] ==== {api-description-title} @@ -34,7 +41,7 @@ IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. [[ml-get-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id`:: +`` (Optional):: (string) Identifier for the {dfeed}. 
It can be a {dfeed} identifier or a wildcard expression. If you do not specify one of these options, the API returns information about all {dfeeds}. @@ -48,13 +55,6 @@ The API returns the following information: (array) An array of {dfeed} objects. For more information, see <>. -[[ml-get-datafeed-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-filter.asciidoc b/docs/reference/ml/apis/get-filter.asciidoc index c69b717427272..ad5fee343f6d5 100644 --- a/docs/reference/ml/apis/get-filter.asciidoc +++ b/docs/reference/ml/apis/get-filter.asciidoc @@ -15,6 +15,13 @@ Retrieves filters. `GET _ml/filters/` +[[ml-get-filter-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-get-filter-desc]] ==== {api-description-title} @@ -24,16 +31,16 @@ You can get a single filter or all filters. For more information, see [[ml-get-filter-path-parms]] ==== {api-path-parms-title} -`filter_id`:: +`` (Optional):: (string) Identifier for the filter. [[ml-get-filter-query-parms]] ==== {api-query-parms-title} -`from`::: +`from` (Optional)::: (integer) Skips the specified number of filters. -`size`::: +`size` (Optional)::: (integer) Specifies the maximum number of filters to obtain. [[ml-get-filter-results]] @@ -45,13 +52,6 @@ The API returns the following information: (array) An array of filter resources. For more information, see <>. -[[ml-get-filter-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-filter-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-influencer.asciidoc b/docs/reference/ml/apis/get-influencer.asciidoc index fedcac2079245..8d7ca889a264f 100644 --- a/docs/reference/ml/apis/get-influencer.asciidoc +++ b/docs/reference/ml/apis/get-influencer.asciidoc @@ -13,39 +13,49 @@ Retrieves job results for one or more influencers. `GET _ml/anomaly_detectors//results/influencers` +[[ml-get-influencer-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also +need `read` index privilege on the index that stores the results. The +`machine_learning_admin` and `machine_learning_user` roles provide these +privileges. See {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + [[ml-get-influencer-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job. [[ml-get-influencer-request-body]] ==== {api-request-body-title} -`desc`:: +`desc` (Optional):: (boolean) If true, the results are sorted in descending order. -`end`:: +`end` (Optional):: (string) Returns influencers with timestamps earlier than this time. -`exclude_interim`:: +`exclude_interim` (Optional):: (boolean) If true, the output excludes interim results. By default, interim results are included. 
-`influencer_score`:: +`influencer_score` (Optional):: (double) Returns influencers with anomaly scores greater or equal than this value. -`page`:: +`page` (Optional):: `from`::: (integer) Skips the specified number of influencers. `size`::: (integer) Specifies the maximum number of influencers to obtain. -`sort`:: +`sort` (Optional):: (string) Specifies the sort field for the requested influencers. By default the influencers are sorted by the `influencer_score` value. -`start`:: +`start` (Optional):: (string) Returns influencers with timestamps after this time. [[ml-get-influencer-results]] @@ -57,16 +67,6 @@ The API returns the following information: (array) An array of influencer objects. For more information, see <>. -[[ml-get-influencer-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. You also need `read` index privilege on the index -that stores the results. The `machine_learning_admin` and `machine_learning_user` -roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. - [[ml-get-influencer-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-job-stats.asciidoc b/docs/reference/ml/apis/get-job-stats.asciidoc index 4b32b11abf868..8a705d7ff9ed1 100644 --- a/docs/reference/ml/apis/get-job-stats.asciidoc +++ b/docs/reference/ml/apis/get-job-stats.asciidoc @@ -17,7 +17,14 @@ Retrieves usage information for jobs. `GET _ml/anomaly_detectors/_stats` + -`GET _ml/anomaly_detectors/_all/_stats` + +`GET _ml/anomaly_detectors/_all/_stats` + +[[ml-get-job-stats-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. [[ml-get-job-stats-desc]] ==== {api-description-title} @@ -32,7 +39,7 @@ IMPORTANT: This API returns a maximum of 10,000 jobs. [[ml-get-job-stats-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Optional):: (string) An identifier for the job. It can be a job identifier, a group name, or a wildcard expression. If you do not specify one of these options, the API returns statistics for all jobs. @@ -46,13 +53,6 @@ The API returns the following information: (array) An array of job statistics objects. For more information, see <>. -[[ml-get-job-stats-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. - [[ml-get-job-stats-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-job.asciidoc b/docs/reference/ml/apis/get-job.asciidoc index a4bbb66b5d03f..176ca09fc56c5 100644 --- a/docs/reference/ml/apis/get-job.asciidoc +++ b/docs/reference/ml/apis/get-job.asciidoc @@ -19,6 +19,13 @@ Retrieves configuration information for jobs. `GET _ml/anomaly_detectors/_all` +[[ml-get-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-get-job-desc]] ==== {api-description-title} @@ -32,7 +39,7 @@ IMPORTANT: This API returns a maximum of 10,000 jobs. 
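
For orientation, a request sketch for this API (the `total-requests` job ID is hypothetical; `_all` and wildcard expressions work as documented below):

[source,js]
----
GET _ml/anomaly_detectors/total-requests
----
// CONSOLE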
[[ml-get-job-path-parms]]
 ==== {api-path-parms-title}

-`job_id`::
+`<job_id>` (Optional)::
   (string) Identifier for the job. It can be a job identifier, a group name,
   or a wildcard expression. If you do not specify one of these options, the API
   returns information for all jobs.
@@ -46,13 +53,6 @@ The API returns the following information:
   (array) An array of job resources. For more information, see
   <>.

-[[ml-get-job-prereqs]]
-==== {api-prereq-title}
-
-You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster
-privileges to use this API. For more information, see
-{stack-ov}/security-privileges.html[Security Privileges].
-
 [[ml-get-job-example]]
 ==== {api-examples-title}

diff --git a/docs/reference/ml/apis/get-ml-info.asciidoc b/docs/reference/ml/apis/get-ml-info.asciidoc
index b60a36eed2985..2c486741ffd42 100644
--- a/docs/reference/ml/apis/get-ml-info.asciidoc
+++ b/docs/reference/ml/apis/get-ml-info.asciidoc
@@ -15,6 +15,15 @@ Returns defaults and limits used by machine learning.

 `GET _ml/info`

+[[get-ml-info-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `monitor_ml`,
+`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. The
+`machine_learning_admin` and `machine_learning_user` roles provide these
+privileges. See {stack-ov}/security-privileges.html[Security privileges] and
+{stack-ov}/built-in-roles.html[Built-in roles].
+
 [[get-ml-info-desc]]
 ==== {api-description-title}

@@ -23,15 +32,6 @@ understand machine learning configurations where some options are not
 specified, meaning that the defaults should be used. This endpoint may be used
 to find out what those defaults are.

-[[get-ml-info-prereqs]]
-==== {api-prereq-title}
-
-You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster
-privileges to use this API. The `machine_learning_admin` and `machine_learning_user`
-roles provide these privileges. For more information, see
-{stack-ov}/security-privileges.html[Security privileges] and
-{stack-ov}/built-in-roles.html[Built-in roles].
-
 [[get-ml-info-example]]
 ==== {api-examples-title}

diff --git a/docs/reference/ml/apis/get-overall-buckets.asciidoc b/docs/reference/ml/apis/get-overall-buckets.asciidoc
index 81c5c371ac4a1..4d8287f9a54f7 100644
--- a/docs/reference/ml/apis/get-overall-buckets.asciidoc
+++ b/docs/reference/ml/apis/get-overall-buckets.asciidoc
@@ -18,6 +18,16 @@ bucket results of multiple jobs.

 `GET _ml/anomaly_detectors/_all/results/overall_buckets`

+[[ml-get-overall-buckets-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `monitor_ml`,
+`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also
+need `read` index privilege on the index that stores the results. The
+`machine_learning_admin` and `machine_learning_user` roles provide these
+privileges. See {stack-ov}/security-privileges.html[Security privileges] and
+{stack-ov}/built-in-roles.html[Built-in roles].
+
 [[ml-get-overall-buckets-desc]]
 ==== {api-description-title}

@@ -46,37 +56,38 @@ overall buckets with a span equal to the largest job's `bucket_span`.
 [[ml-get-overall-buckets-path-parms]]
 ==== {api-path-parms-title}

-`job_id`::
+`<job_id>` (Required)::
   (string) Identifier for the job. It can be a job identifier, a group name, a
   comma-separated list of jobs or groups, or a wildcard expression.
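
To make the multi-job behavior concrete, a request sketch that combines this path parameter with body parameters defined in the next section (both job IDs are hypothetical):

[source,js]
----
GET _ml/anomaly_detectors/job-1,job-2/results/overall_buckets
{
  "top_n": 2,
  "overall_score": 50.0
}
----
// CONSOLE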
[[ml-get-overall-buckets-request-body]] ==== {api-request-body-title} -`allow_no_jobs`:: +`allow_no_jobs` (Optional):: (boolean) If `false` and the `job_id` does not match any job an error will be returned. The default value is `true`. -`bucket_span`:: +`bucket_span` (Optional):: (string) The span of the overall buckets. Must be greater or equal to the largest job's `bucket_span`. Defaults to the largest job's `bucket_span`. -`end`:: +`end` (Optional):: (string) Returns overall buckets with timestamps earlier than this time. -`exclude_interim`:: +`exclude_interim` (Optional):: (boolean) If `true`, the output excludes interim overall buckets. Overall buckets are interim if any of the job buckets within the overall bucket interval are interim. By default, interim results are included. -`overall_score`:: - (double) Returns overall buckets with overall scores greater or equal than this value. +`overall_score` (Optional):: + (double) Returns overall buckets with overall scores greater or equal than + this value. -`start`:: +`start` (Optional):: (string) Returns overall buckets with timestamps after this time. -`top_n`:: +`top_n` (Optional):: (integer) The number of top job bucket scores to be used in the `overall_score` calculation. The default value is `1`. @@ -89,16 +100,6 @@ The API returns the following information: (array) An array of overall bucket objects. For more information, see <>. -[[ml-get-overall-buckets-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. You also need `read` index privilege on the index -that stores the results. The `machine_learning_admin` and `machine_learning_user` -roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security Privileges] and -{stack-ov}/built-in-roles.html[Built-in Roles]. - [[ml-get-overall-buckets-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-record.asciidoc b/docs/reference/ml/apis/get-record.asciidoc index fec36aa4a5651..0acc3e0e49fce 100644 --- a/docs/reference/ml/apis/get-record.asciidoc +++ b/docs/reference/ml/apis/get-record.asciidoc @@ -13,39 +13,49 @@ Retrieves anomaly records for a job. `GET _ml/anomaly_detectors//results/records` +[[ml-get-record-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. You also +need `read` index privilege on the index that stores the results. The +`machine_learning_admin` and `machine_learning_user` roles provide these +privileges. See {stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. + [[ml-get-record-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`job_id` (Required):: (string) Identifier for the job. [[ml-get-record-request-body]] ==== {api-request-body-title} -`desc`:: +`desc` (Optional):: (boolean) If true, the results are sorted in descending order. -`end`:: +`end` (Optional):: (string) Returns records with timestamps earlier than this time. -`exclude_interim`:: +`exclude_interim` (Optional):: (boolean) If true, the output excludes interim results. By default, interim results are included. -`page`:: +`page` (Optional):: `from`::: (integer) Skips the specified number of records. `size`::: (integer) Specifies the maximum number of records to obtain. 
-`record_score`:: +`record_score` (Optional):: (double) Returns records with anomaly scores greater or equal than this value. -`sort`:: +`sort` (Optional):: (string) Specifies the sort field for the requested records. By default, the records are sorted by the `anomaly_score` value. -`start`:: +`start` (Optional):: (string) Returns records with timestamps after this time. [[ml-get-record-results]] @@ -57,16 +67,6 @@ The API returns the following information: (array) An array of record objects. For more information, see <>. -[[ml-get-record-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. You also need `read` index privilege on the index -that stores the results. The `machine_learning_admin` and `machine_learning_user` -roles provide these privileges. For more information, see -{stack-ov}/security-privileges.html[Security privileges] and -{stack-ov}/built-in-roles.html[Built-in roles]. - [[ml-get-record-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/get-snapshot.asciidoc b/docs/reference/ml/apis/get-snapshot.asciidoc index eb5bc4354f27f..ea1b15df33f33 100644 --- a/docs/reference/ml/apis/get-snapshot.asciidoc +++ b/docs/reference/ml/apis/get-snapshot.asciidoc @@ -15,36 +15,43 @@ Retrieves information about model snapshots. `GET _ml/anomaly_detectors//model_snapshots/` +[[ml-get-snapshot-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-get-snapshot-path-parms]] ==== {api-path-parms-title} -`job_id`:: +`` (Required):: (string) Identifier for the job. -`snapshot_id`:: +`` (Optional):: (string) Identifier for the model snapshot. If you do not specify this optional parameter, the API returns information about all model snapshots. [[ml-get-snapshot-request-body]] ==== {api-request-body-title} -`desc`:: +`desc` (Optional):: (boolean) If true, the results are sorted in descending order. -`end`:: +`end` (Optional):: (date) Returns snapshots with timestamps earlier than this time. -`from`:: +`from` (Optional):: (integer) Skips the specified number of snapshots. -`size`:: +`size` (Optional):: (integer) Specifies the maximum number of snapshots to obtain. -`sort`:: +`sort` (Optional):: (string) Specifies the sort field for the requested snapshots. By default, the snapshots are sorted by their timestamp. -`start`:: +`start` (Optional):: (string) Returns snapshots with timestamps after this time. [[ml-get-snapshot-results]] @@ -56,13 +63,6 @@ The API returns the following information: (array) An array of model snapshot objects. For more information, see <>. -[[ml-get-snapshot-prereqs]] -==== {api-prereq-title} - -You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster -privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-get-snapshot-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/open-job.asciidoc b/docs/reference/ml/apis/open-job.asciidoc index 4966ab9fc654e..84000cb89b0de 100644 --- a/docs/reference/ml/apis/open-job.asciidoc +++ b/docs/reference/ml/apis/open-job.asciidoc @@ -15,34 +15,35 @@ A job can be opened and closed multiple times throughout its lifecycle. 
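
As a sketch of that lifecycle (the `total-requests` job ID is hypothetical; the `timeout` body parameter is documented below):

[source,js]
----
POST _ml/anomaly_detectors/total-requests/_open
{
  "timeout": "35m"
}
----
// CONSOLE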
`POST _ml/anomaly_detectors/{job_id}/_open`

+[[ml-open-job-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `manage_ml` or
+`manage` cluster privileges to use this API. See
+{stack-ov}/security-privileges.html[Security privileges].
+
 [[ml-open-job-desc]]
 ==== {api-description-title}

 When you open a new job, it starts with an empty model.

-When you open an existing job, the most recent model state is automatically loaded.
-The job is ready to resume its analysis from where it left off, once new data is received.
+When you open an existing job, the most recent model state is automatically
+loaded. The job is ready to resume its analysis from where it left off, once new
+data is received.

 [[ml-open-job-path-parms]]
 ==== {api-path-parms-title}

-`job_id` (required)::
-(string) Identifier for the job
+`<job_id>` (Required)::
+  (string) Identifier for the job

 [[ml-open-job-request-body]]
 ==== {api-request-body-title}

-`timeout`::
+`timeout` (Optional)::
  (time) Controls the time to wait until a job has opened.
  The default value is 30 minutes.

-[[ml-open-job-prereqs]]
-==== {api-prereq-title}
-
-You must have `manage_ml`, or `manage` cluster privileges to use this API.
-For more information, see
-{stack-ov}/security-privileges.html[Security privileges].
-
 [[ml-open-job-example]]
 ==== {api-examples-title}

diff --git a/docs/reference/ml/apis/post-calendar-event.asciidoc b/docs/reference/ml/apis/post-calendar-event.asciidoc
index 1a3614045ea4b..88d771f3b7f18 100644
--- a/docs/reference/ml/apis/post-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/post-calendar-event.asciidoc
@@ -13,6 +13,13 @@ Posts scheduled events in a calendar.

 `POST _ml/calendars/<calendar_id>/events`

+[[ml-post-calendar-event-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `manage_ml` or
+`manage` cluster privileges to use this API. See
+{stack-ov}/security-privileges.html[Security privileges].
+
 [[ml-post-calendar-event-desc]]
 ==== {api-description-title}

@@ -22,23 +29,16 @@ of which must have a start time, end time, and description.
 [[ml-post-calendar-event-path-parms]]
 ==== {api-path-parms-title}

-`calendar_id` (required)::
+`<calendar_id>` (Required)::
  (string) Identifier for the calendar.

 [[ml-post-calendar-event-request-body]]
 ==== {api-request-body-title}

-`events`::
-  (array) A list of one of more scheduled events. The event's start and end times
-  may be specified as integer milliseconds since the epoch or as a string in ISO 8601
-  format. See <>.
-
-[[ml-post-calendar-event-prereqs]]
-==== {api-prereq-title}
-
-You must have `manage_ml`, or `manage` cluster privileges to use this API.
-For more information, see
-{stack-ov}/security-privileges.html[Security privileges].
+`events` (Required)::
+  (array) A list of one or more scheduled events. The event's start and end
+  times may be specified as integer milliseconds since the epoch or as a string
+  in ISO 8601 format. See <>.

 [[ml-post-calendar-event-example]]
 ==== {api-examples-title}

diff --git a/docs/reference/ml/apis/post-data.asciidoc b/docs/reference/ml/apis/post-data.asciidoc
index 39fb048d8b448..3c2d0e49fde93 100644
--- a/docs/reference/ml/apis/post-data.asciidoc
+++ b/docs/reference/ml/apis/post-data.asciidoc
@@ -13,6 +13,13 @@ Sends data to an anomaly detection job for analysis.

 `POST _ml/anomaly_detectors/<job_id>/_data`

+[[ml-post-data-prereqs]]
+==== {api-prereq-title}
+
+* If the {es} {security-features} are enabled, you must have `manage_ml` or
+`manage` cluster privileges to use this API.
See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-post-data-desc]] ==== {api-description-title} @@ -45,17 +52,17 @@ or a comma-separated list. [[ml-post-data-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job +`` (Required):: + (string) Identifier for the job. [[ml-post-data-query-parms]] ==== {api-query-parms-title} -`reset_start`:: - (string) Specifies the start of the bucket resetting range +`reset_start` (Optional):: + (string) Specifies the start of the bucket resetting range. -`reset_end`:: - (string) Specifies the end of the bucket resetting range +`reset_end` (Optional):: + (string) Specifies the end of the bucket resetting range. [[ml-post-data-request-body]] ==== {api-request-body-title} @@ -63,17 +70,11 @@ or a comma-separated list. A sequence of one or more JSON documents containing the data to be analyzed. Only whitespace characters are permitted in between the documents. -[[ml-post-data-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-post-data-example]] ==== {api-examples-title} -The following example posts data from the it_ops_new_kpi.json file to the `it_ops_new_kpi` job: +The following example posts data from the `it_ops_new_kpi.json` file to the +`it_ops_new_kpi` job: [source,js] -------------------------------------------------- @@ -82,8 +83,8 @@ $ curl -s -H "Content-type: application/json" --data-binary @it_ops_new_kpi.json -------------------------------------------------- -When the data is sent, you receive information about the operational progress of the job. -For example: +When the data is sent, you receive information about the operational progress of +the job. For example: [source,js] ---- diff --git a/docs/reference/ml/apis/preview-datafeed.asciidoc b/docs/reference/ml/apis/preview-datafeed.asciidoc index cfffe96b3de37..4ca3ebcd10e40 100644 --- a/docs/reference/ml/apis/preview-datafeed.asciidoc +++ b/docs/reference/ml/apis/preview-datafeed.asciidoc @@ -15,6 +15,13 @@ Previews a {dfeed}. `GET _ml/datafeeds//_preview` +[[ml-preview-datafeed-prereqs]] +==== {api-prereq-title} + +* If {es} {security-features} are enabled, you must have `monitor_ml`, `monitor`, +`manage_ml`, or `manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-preview-datafeed-desc]] ==== {api-description-title} @@ -22,30 +29,19 @@ The preview {dfeeds} API returns the first "page" of results from the `search` that is created by using the current {dfeed} settings. This preview shows the structure of the data that will be passed to the anomaly detection engine. +IMPORTANT: When {es} {security-features} are enabled, the {dfeed} query is +previewed using the credentials of the user calling the preview {dfeed} API. +When the {dfeed} is started it runs the query using the roles of the last user +to create or update it. If the two sets of roles differ then the preview may +not accurately reflect what the {dfeed} will return when started. To avoid +such problems, the same user that creates/updates the {dfeed} should preview +it to ensure it is returning the expected data. 
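+
+For illustration, assuming a {dfeed} with the hypothetical identifier
+`datafeed-total-requests` exists, it could be previewed as follows:
+
+[source,js]
+--------------------------------------------------
+GET _ml/datafeeds/datafeed-total-requests/_preview
+--------------------------------------------------
+// CONSOLE
+// TEST[skip:requires an existing datafeed]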
+ [[ml-preview-datafeed-path-parms]] ==== {api-path-parms-title} -`datafeed_id` (required):: - (string) Identifier for the {dfeed} - -[[ml-preview-datafeed-prereqs]] -==== {api-prereq-title} - -If {es} {security-features} are enabled, you must have `monitor_ml`, `monitor`, -`manage_ml`, or `manage` cluster privileges to use this API. For more -information, see -{stack-ov}/security-privileges.html[Security privileges]. - -[[ml-preview-datafeed-security]] -==== Security Integration - -When {es} {security-features} are enabled, the {dfeed} query is previewed using -the credentials of the user calling the preview {dfeed} API. When the {dfeed} -is started it runs the query using the roles of the last user to -create or update it. If the two sets of roles differ then the preview may -not accurately reflect what the {dfeed} will return when started. To avoid -such problems, the same user that creates/updates the {dfeed} should preview -it to ensure it is returning the expected data. +`` (Required):: + (string) Identifier for the {dfeed}. [[ml-preview-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/put-calendar-job.asciidoc b/docs/reference/ml/apis/put-calendar-job.asciidoc index abf124c8a1114..0a1ee2fcc6de0 100644 --- a/docs/reference/ml/apis/put-calendar-job.asciidoc +++ b/docs/reference/ml/apis/put-calendar-job.asciidoc @@ -13,22 +13,22 @@ Adds a job to a calendar. `PUT _ml/calendars//jobs/` +[[ml-put-calendar-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-put-calendar-job-path-parms]] ==== {api-path-parms-title} -`calendar_id` (required):: +`` (Required):: (string) Identifier for the calendar. -`job_id` (required):: - (string) An identifier for the job. It can be a job identifier, a group name, or a - comma-separated list of jobs or groups. - -[[ml-put-calendar-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security Privileges]. +`` (Required):: + (string) An identifier for the job. It can be a job identifier, a group name, + or a comma-separated list of jobs or groups. [[ml-put-calendar-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/put-calendar.asciidoc b/docs/reference/ml/apis/put-calendar.asciidoc index b7ea586a106a6..f98dd541d6753 100644 --- a/docs/reference/ml/apis/put-calendar.asciidoc +++ b/docs/reference/ml/apis/put-calendar.asciidoc @@ -13,6 +13,13 @@ Instantiates a calendar. `PUT _ml/calendars/` +[[ml-put-calendar-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-put-calendar-desc]] ==== {api-description-title} @@ -22,22 +29,15 @@ For more information, see [[ml-put-calendar-path-parms]] ==== {api-path-parms-title} -`calendar_id` (required):: +`` (Required):: (string) Identifier for the calendar. [[ml-put-calendar-request-body]] ==== {api-request-body-title} -`description`:: +`description` (Optional):: (string) A description of the calendar. -[[ml-put-calendar-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. 
-For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-put-calendar-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc index 428af146b4da4..6c4578abb1671 100644 --- a/docs/reference/ml/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/apis/put-datafeed.asciidoc @@ -15,21 +15,34 @@ Instantiates a {dfeed}. `PUT _ml/datafeeds/` +[[ml-put-datafeed-prereqs]] +==== {api-prereq-title} + +* If {es} {security-features} are enabled, you must have `manage_ml` or `manage` +cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-put-datafeed-desc]] ==== {api-description-title} You must create a job before you create a {dfeed}. You can associate only one {dfeed} to each job. -IMPORTANT: You must use {kib} or this API to create a {dfeed}. Do not put a {dfeed} - directly to the `.ml-config` index using the Elasticsearch index API. - If {es} {security-features} are enabled, do not give users `write` - privileges on the `.ml-config` index. +[IMPORTANT] +==== +* You must use {kib} or this API to create a {dfeed}. Do not put a +{dfeed} directly to the `.ml-config` index using the {es} index API. If {es} +{security-features} are enabled, do not give users `write` privileges on the +`.ml-config` index. +* When {es} {security-features} are enabled, your {dfeed} remembers which roles +the user who created it had at the time of creation and runs the query using +those same roles. +==== [[ml-put-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id` (required):: +`` (Required):: (string) A numerical character string that uniquely identifies the {dfeed}. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. @@ -37,73 +50,58 @@ IMPORTANT: You must use {kib} or this API to create a {dfeed}. Do not put a {df [[ml-put-datafeed-request-body]] ==== {api-request-body-title} -`aggregations`:: +`aggregations` (Optional):: (object) If set, the {dfeed} performs aggregation searches. For more information, see <>. -`chunking_config`:: +`chunking_config` (Optional):: (object) Specifies how data searches are split into time chunks. See <>. -`delayed_data_check_config`:: +`delayed_data_check_config` (Optional):: (object) Specifies whether the data feed checks for missing data and the size of the window. See <>. -`frequency`:: +`frequency` (Optional):: (time units) The interval at which scheduled queries are made while the {dfeed} runs in real time. The default value is either the bucket span for short bucket spans, or, for longer bucket spans, a sensible fraction of the bucket span. For example: `150s`. -`indices` (required):: +`indices` (Required):: (array) An array of index names. Wildcards are supported. For example: `["it_ops_metrics", "server*"]`. -`job_id` (required):: +`job_id` (Required):: (string) A numerical character string that uniquely identifies the job. -`query`:: +`query` (Optional):: (object) The {es} query domain-specific language (DSL). This value corresponds to the query object in an {es} search POST body. All the options that are supported by {Es} can be used, as this object is passed verbatim to {es}. By default, this property has the following value: `{"match_all": {"boost": 1}}`. -`query_delay`:: +`query_delay` (Optional):: (time units) The number of seconds behind real time that data is queried. 
For example, if data from 10:04 a.m. might not be searchable in {es} until 10:06 a.m., set this property to 120 seconds. The default value is `60s`. -`script_fields`:: +`script_fields` (Optional):: (object) Specifies scripts that evaluate custom expressions and returns script fields to the {dfeed}. The <> in a job can contain - functions that use these script fields. - For more information, + functions that use these script fields. For more information, see {ref}/search-request-script-fields.html[Script Fields]. -`scroll_size`:: +`scroll_size` (Optional):: (unsigned integer) The `size` parameter that is used in {es} searches. The default value is `1000`. For more information about these properties, see <>. -[[ml-put-datafeed-prereqs]] -==== {api-prereq-title} - -If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` -cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - -[[ml-put-datafeed-security]] -==== Security integration - -When {es} {security-features} are enabled, your {dfeed} remembers which roles the -user who created it had at the time of creation and runs the query using those -same roles. - [[ml-put-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/put-filter.asciidoc b/docs/reference/ml/apis/put-filter.asciidoc index 61ed24f4d5b9b..ad0d6d34ea81d 100644 --- a/docs/reference/ml/apis/put-filter.asciidoc +++ b/docs/reference/ml/apis/put-filter.asciidoc @@ -13,6 +13,13 @@ Instantiates a filter. `PUT _ml/filters/` +[[ml-put-filter-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-put-filter-desc]] ==== {api-description-title} @@ -23,28 +30,21 @@ the `custom_rules` property of <` (Required):: (string) Identifier for the filter. [[ml-put-filter-request-body]] ==== {api-request-body-title} -`description`:: +`description` (Optional):: (string) A description of the filter. -`items`:: +`items` (Required):: (array of strings) The items of the filter. A wildcard `*` can be used at the beginning or the end of an item. Up to 10000 items are allowed in each filter. -[[ml-put-filter-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-put-filter-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/put-job.asciidoc b/docs/reference/ml/apis/put-job.asciidoc index acf8d9db82499..c60de488180d9 100644 --- a/docs/reference/ml/apis/put-job.asciidoc +++ b/docs/reference/ml/apis/put-job.asciidoc @@ -13,6 +13,13 @@ Instantiates a job. `PUT _ml/anomaly_detectors/` +[[ml-put-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-put-job-desc]] ==== {api-description-title} @@ -24,7 +31,7 @@ IMPORTANT: You must use {kib} or this API to create a {ml} job. Do not put a job [[ml-put-job-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: +`` (Required):: (string) Identifier for the job. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. 
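+
+For illustration, a minimal sketch of a create request follows; the
+`total-requests` identifier, the `count` detector, and the `timestamp` time
+field are hypothetical choices, not requirements:
+
+[source,js]
+--------------------------------------------------
+PUT _ml/anomaly_detectors/total-requests
+{
+  "analysis_config": {
+    "bucket_span": "10m",
+    "detectors": [
+      {
+        "function": "count"
+      }
+    ]
+  },
+  "data_description": {
+    "time_field": "timestamp"
+  }
+}
+--------------------------------------------------
+// CONSOLE
+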
@@ -32,61 +39,54 @@ IMPORTANT: You must use {kib} or this API to create a {ml} job. Do not put a job [[ml-put-job-request-body]] ==== {api-request-body-title} -`analysis_config`:: +`analysis_config` (Required):: (object) The analysis configuration, which specifies how to analyze the data. See <>. -`analysis_limits`:: +`analysis_limits` (Optional):: (object) Specifies runtime limits for the job. See <>. -`background_persist_interval`:: +`background_persist_interval` (Optional):: (time units) Advanced configuration option. The time between each periodic persistence of the model. See <>. -`custom_settings`:: +`custom_settings` (Optional):: (object) Advanced configuration option. Contains custom meta data about the job. See <>. -`data_description` (required):: +`data_description` (Required):: (object) Describes the format of the input data. This object is required, but it can be empty (`{}`). See <>. -`description`:: +`description` (Optional):: (string) A description of the job. -`groups`:: +`groups` (Optional):: (array of strings) A list of job groups. See <>. -`model_plot_config`:: +`model_plot_config` (Optional):: (object) Advanced configuration option. Specifies to store model information along with the results. This adds overhead to the performance of the system and is not feasible for jobs with many entities, see <>. -`model_snapshot_retention_days`:: +`model_snapshot_retention_days` (Optional):: (long) The time in days that model snapshots are retained for the job. Older snapshots are deleted. The default value is `1`, which means snapshots are retained for one day (twenty-four hours). -`renormalization_window_days`:: +`renormalization_window_days` (Optional):: (long) Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen. See <>. -`results_index_name`:: +`results_index_name` (Optional):: (string) A text string that affects the name of the {ml} results index. The default value is `shared`, which generates an index named `.ml-anomalies-shared`. -`results_retention_days`:: +`results_retention_days` (Optional):: (long) Advanced configuration option. The number of days for which job results are retained. See <>. -[[ml-put-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-put-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/revert-snapshot.asciidoc b/docs/reference/ml/apis/revert-snapshot.asciidoc index f470b4ec60ffb..86d3d4c14a93c 100644 --- a/docs/reference/ml/apis/revert-snapshot.asciidoc +++ b/docs/reference/ml/apis/revert-snapshot.asciidoc @@ -13,6 +13,13 @@ Reverts to a specific snapshot. `POST _ml/anomaly_detectors//model_snapshots//_revert` +[[ml-revert-snapshot-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-revert-snapshot-desc]] ==== {api-description-title} @@ -29,16 +36,16 @@ IMPORTANT: Before you revert to a saved snapshot, you must close the job. [[ml-revert-snapshot-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job +`` (Required):: + (string) Identifier for the job. -`snapshot_id` (required):: - (string) Identifier for the model snapshot +`` (Required):: + (string) Identifier for the model snapshot. 
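+
+For illustration, a revert request takes the following form; the
+`it_ops_new_kpi` job and `1491852978` snapshot identifiers are hypothetical:
+
+[source,js]
+--------------------------------------------------
+POST _ml/anomaly_detectors/it_ops_new_kpi/model_snapshots/1491852978/_revert
+--------------------------------------------------
+// CONSOLE
+// TEST[skip:requires an existing model snapshot]
+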
[[ml-revert-snapshot-request-body]] ==== {api-request-body-title} -`delete_intervening_results`:: +`delete_intervening_results` (Optional):: (boolean) If true, deletes the results in the time period between the latest results and the time of the reverted snapshot. It also resets the model to accept records for this time period. The default value is false. @@ -47,13 +54,6 @@ NOTE: If you choose not to delete intervening results when reverting a snapshot, the job will not accept input data that is older than the current time. If you want to resend data, then delete the intervening results. -[[ml-revert-snapshot-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-revert-snapshot-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/set-upgrade-mode.asciidoc b/docs/reference/ml/apis/set-upgrade-mode.asciidoc index 16ddbe19e5995..6a00656430c66 100644 --- a/docs/reference/ml/apis/set-upgrade-mode.asciidoc +++ b/docs/reference/ml/apis/set-upgrade-mode.asciidoc @@ -26,6 +26,13 @@ POST /_ml/set_upgrade_mode?enabled=false&timeout=10m `POST _ml/set_upgrade_mode` +[[ml-set-upgrade-mode-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-set-upgrade-mode-desc]] ==== {api-description-title} @@ -54,20 +61,13 @@ IMPORTANT: No new {ml} jobs can be opened while the `upgrade_mode` setting is [[ml-set-upgrade-mode-query-parms]] ==== {api-query-parms-title} -`enabled`:: +`enabled` (Optional):: (boolean) When `true`, this enables `upgrade_mode`. Defaults to `false` -`timeout`:: +`timeout` (Optional):: (time) The time to wait for the request to be completed. The default value is 30 seconds. -[[ml-set-upgrade-mode-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-set-upgrade-mode-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/start-datafeed.asciidoc b/docs/reference/ml/apis/start-datafeed.asciidoc index 35c632d5c41c6..05cf0766e9522 100644 --- a/docs/reference/ml/apis/start-datafeed.asciidoc +++ b/docs/reference/ml/apis/start-datafeed.asciidoc @@ -17,6 +17,13 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle. `POST _ml/datafeeds//_start` +[[ml-start-datafeed-prereqs]] +==== {api-prereq-title} + +* If {es} {security-features} are enabled, you must have `manage_ml` or `manage` +cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-start-datafeed-desc]] ==== {api-description-title} @@ -58,41 +65,31 @@ If you specify a `start` value that is earlier than the timestamp of the latest processed record, the {dfeed} continues from 1 millisecond after the timestamp of the latest processed record. +IMPORTANT: When {es} {security-features} are enabled, your {dfeed} remembers +which roles the last user to create or update it had at the time of +creation/update and runs the query using those same roles. + [[ml-start-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id` (required):: -(string) Identifier for the {dfeed} +`` (Required):: + (string) Identifier for the {dfeed}. 
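+
+For illustration, assuming a {dfeed} with the hypothetical identifier
+`datafeed-total-requests` exists, it could be started as follows:
+
+[source,js]
+--------------------------------------------------
+POST _ml/datafeeds/datafeed-total-requests/_start
+--------------------------------------------------
+// CONSOLE
+// TEST[skip:requires an existing datafeed]
+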
[[ml-start-datafeed-request-body]] ==== {api-request-body-title} -`end`:: +`end` (Optional):: (string) The time that the {dfeed} should end. This value is exclusive. The default value is an empty string. -`start`:: +`start` (Optional):: (string) The time that the {dfeed} should begin. This value is inclusive. The default value is an empty string. -`timeout`:: +`timeout` (Optional):: (time) Controls the amount of time to wait until a {dfeed} starts. The default value is 20 seconds. -[[ml-start-datafeed-prereqs]] -==== {api-prereq-title} - -If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` -cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - -[[ml-start-datafeed-security]] -==== Security integration - -When {es} {security-features} are enabled, your {dfeed} remembers which roles the -last user to create or update it had at the time of creation/update and runs the -query using those same roles. - [[ml-start-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/stop-datafeed.asciidoc b/docs/reference/ml/apis/stop-datafeed.asciidoc index 497975f425c88..bdac8d51fab04 100644 --- a/docs/reference/ml/apis/stop-datafeed.asciidoc +++ b/docs/reference/ml/apis/stop-datafeed.asciidoc @@ -10,9 +10,6 @@ Stops one or more {dfeeds}. -A {dfeed} that is stopped ceases to retrieve data from {es}. -A {dfeed} can be started and stopped multiple times throughout its lifecycle. - [[ml-stop-datafeed-request]] ==== {api-request-title} @@ -22,9 +19,19 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle. `POST _ml/datafeeds/_all/_stop` +[[ml-stop-datafeed-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-stop-datafeed-desc]] ==== {api-description-title} +A {dfeed} that is stopped ceases to retrieve data from {es}. +A {dfeed} can be started and stopped multiple times throughout its lifecycle. + You can stop multiple {dfeeds} in a single API request by using a comma-separated list of {dfeeds} or a wildcard expression. You can close all {dfeeds} by using `_all` or by specifying `*` as the ``. @@ -32,27 +39,20 @@ comma-separated list of {dfeeds} or a wildcard expression. You can close all [[ml-stop-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id`:: +`` (Required):: (string) Identifier for the {dfeed}. It can be a {dfeed} identifier or a wildcard expression. [[ml-stop-datafeed-request-body]] ==== {api-request-body-title} -`force`:: +`force` (Optional):: (boolean) If true, the {dfeed} is stopped forcefully. -`timeout`:: +`timeout` (Optional):: (time) Controls the amount of time to wait until a {dfeed} stops. The default value is 20 seconds. -[[ml-stop-datafeed-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-stop-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/update-datafeed.asciidoc b/docs/reference/ml/apis/update-datafeed.asciidoc index 9c3e56e66a642..b57088673d845 100644 --- a/docs/reference/ml/apis/update-datafeed.asciidoc +++ b/docs/reference/ml/apis/update-datafeed.asciidoc @@ -15,61 +15,72 @@ Updates certain properties of a {dfeed}. 
`POST _ml/datafeeds//_update` +[[ml-update-datafeed-prereqs]] +==== {api-prereq-title} + +* If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` +cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-update-datafeed-desc]] ==== {api-description-title} -NOTE: If you update the `delayed_data_check_config` property, you must stop and +If you update the `delayed_data_check_config` property, you must stop and start the {dfeed} for the change to be applied. +IMPORTANT: When {es} {security-features} are enabled, your {dfeed} remembers +which roles the user who updated it had at the time of update and runs the query +using those same roles. + [[ml-update-datafeed-path-parms]] ==== {api-path-parms-title} -`feed_id` (required):: - (string) Identifier for the {dfeed} +`` (Required):: + (string) Identifier for the {dfeed}. [[ml-update-datafeed-request-body]] ==== {api-request-body-title} The following properties can be updated after the {dfeed} is created: -`aggregations`:: +`aggregations` (Optional):: (object) If set, the {dfeed} performs aggregation searches. For more information, see <>. -`chunking_config`:: +`chunking_config` (Optional):: (object) Specifies how data searches are split into time chunks. See <>. -`delayed_data_check_config`:: +`delayed_data_check_config` (Optional):: (object) Specifies whether the data feed checks for missing data and the size of the window. See <>. -`frequency`:: +`frequency` (Optional):: (time units) The interval at which scheduled queries are made while the {dfeed} runs in real time. The default value is either the bucket span for short bucket spans, or, for longer bucket spans, a sensible fraction of the bucket span. For example: `150s`. -`indices`:: +`indices` (Optional):: (array) An array of index names. Wildcards are supported. For example: `["it_ops_metrics", "server*"]`. -`job_id`:: +`job_id` (Optional):: (string) A numerical character string that uniquely identifies the job. -`query`:: +`query` (Optional):: (object) The {es} query domain-specific language (DSL). This value corresponds to the query object in an {es} search POST body. All the options that are supported by {es} can be used, as this object is passed verbatim to {es}. By default, this property has the following value: `{"match_all": {"boost": 1}}`. -`query_delay`:: +`query_delay` (Optional):: (time units) The number of seconds behind real-time that data is queried. For example, if data from 10:04 a.m. might not be searchable in {es} until 10:06 a.m., set this property to 120 seconds. The default value is `60s`. -`script_fields`:: +`script_fields` (Optional):: (object) Specifies scripts that evaluate custom expressions and returns script fields to the {dfeed}. The <> in a job can contain @@ -77,27 +88,13 @@ The following properties can be updated after the {dfeed} is created: For more information, see {ref}/search-request-script-fields.html[Script Fields]. -`scroll_size`:: +`scroll_size` (Optional):: (unsigned integer) The `size` parameter that is used in {es} searches. The default value is `1000`. For more information about these properties, see <>. -[[ml-update-datafeed-prereqs]] -==== {api-prereq-title} - -If {es} {security-features} are enabled, you must have `manage_ml`, or `manage` -cluster privileges to use this API. For more information, see -{stack-ov}/security-privileges.html[Security privileges]. 
- -[[ml-update-datafeed-security]] -==== Security Integration - -When {es} {security-features} are enabled, your {dfeed} remembers which roles the -user who updated it had at the time of update and runs the query using those -same roles. - [[ml-update-datafeed-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/update-filter.asciidoc b/docs/reference/ml/apis/update-filter.asciidoc index 842808ebe558a..df8f3056d12cc 100644 --- a/docs/reference/ml/apis/update-filter.asciidoc +++ b/docs/reference/ml/apis/update-filter.asciidoc @@ -13,35 +13,36 @@ Updates the description of a filter, adds items, or removes items. `POST _ml/filters//_update` +[[ml-update-filter-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-update-filter-path-parms]] ==== {api-path-parms-title} -`filter_id` (required):: +`` (Required):: (string) Identifier for the filter. [[ml-update-filter-request-body]] -==== Request Body +==== {api-request-body-title} -`description`:: +`description` (Optional):: (string) A description for the filter. See <>. -`add_items`:: +`add_items` (Optional):: (array of strings) The items to add to the filter. -`remove_items`:: +`remove_items` (Optional):: (array of strings) The items to remove from the filter. -[[ml-update-filter-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-update-filter-example]] ==== {api-examples-title} -You can change the description, add and remove items to the `safe_domains` filter as follows: +You can change the description, add and remove items to the `safe_domains` +filter as follows: [source,js] -------------------------------------------------- diff --git a/docs/reference/ml/apis/update-job.asciidoc b/docs/reference/ml/apis/update-job.asciidoc index 39c510bda1efa..e78bda613d801 100644 --- a/docs/reference/ml/apis/update-job.asciidoc +++ b/docs/reference/ml/apis/update-job.asciidoc @@ -13,11 +13,19 @@ Updates certain properties of a job. `POST _ml/anomaly_detectors//_update` +[[ml-update-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + + [[ml-update-job-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job +`` (Required):: + (string) Identifier for the job. [[ml-update-job-request-body]] ==== {api-request-body-title} @@ -88,13 +96,6 @@ A detector update object has the following properties: No other detector property can be updated. -[[ml-update-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-update-job-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/update-snapshot.asciidoc b/docs/reference/ml/apis/update-snapshot.asciidoc index edf9e05d867e7..1fe2ed5384bc0 100644 --- a/docs/reference/ml/apis/update-snapshot.asciidoc +++ b/docs/reference/ml/apis/update-snapshot.asciidoc @@ -13,37 +13,38 @@ Updates certain properties of a snapshot. 
`POST _ml/anomaly_detectors//model_snapshots//_update` +[[ml-update-snapshot-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + + [[ml-update-snapshot-path-parms]] ==== {api-path-parms-title} -`job_id` (required):: - (string) Identifier for the job +`` (Required):: + (string) Identifier for the job. -`snapshot_id` (required):: - (string) Identifier for the model snapshot +`` (Required):: + (string) Identifier for the model snapshot. [[ml-update-snapshot-request-body]] ==== {api-request-body-title} The following properties can be updated after the model snapshot is created: -`description`:: - (string) An optional description of the model snapshot. For example, +`description` (Optional):: + (string) A description of the model snapshot. For example, "Before black friday". -`retain`:: +`retain` (Optional):: (boolean) If true, this snapshot will not be deleted during automatic cleanup of snapshots older than `model_snapshot_retention_days`. Note that this snapshot will still be deleted when the job is deleted. The default value is false. -[[ml-update-snapshot-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-update-snapshot-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/validate-detector.asciidoc b/docs/reference/ml/apis/validate-detector.asciidoc index a3b7ca66072c1..2e5896b95cc93 100644 --- a/docs/reference/ml/apis/validate-detector.asciidoc +++ b/docs/reference/ml/apis/validate-detector.asciidoc @@ -13,6 +13,13 @@ Validates detector configuration information. `POST _ml/anomaly_detectors/_validate/detector` +[[ml-valid-detector-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-valid-detector-desc]] ==== {api-description-title} @@ -25,13 +32,6 @@ before you create a job. For a list of the properties that you can specify in the body of this API, see <>. -[[ml-valid-detector-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. -For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-valid-detector-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/validate-job.asciidoc b/docs/reference/ml/apis/validate-job.asciidoc index 651e45715699d..faa7cab2f3928 100644 --- a/docs/reference/ml/apis/validate-job.asciidoc +++ b/docs/reference/ml/apis/validate-job.asciidoc @@ -13,6 +13,13 @@ Validates job configuration information. `POST _ml/anomaly_detectors/_validate` +[[ml-valid-job-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `manage_ml` or +`manage` cluster privileges to use this API. See +{stack-ov}/security-privileges.html[Security privileges]. + [[ml-valid-job-desc]] ==== {api-description-title} @@ -25,13 +32,6 @@ create the job. For a list of the properties that you can specify in the body of this API, see <>. -[[ml-valid-job-prereqs]] -==== {api-prereq-title} - -You must have `manage_ml`, or `manage` cluster privileges to use this API. 
-For more information, see -{stack-ov}/security-privileges.html[Security privileges]. - [[ml-valid-job-example]] ==== {api-examples-title} From f1e3a8fd6c96ff231dae1af97857c1cd945ff5b9 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 27 Jun 2019 15:16:24 -0700 Subject: [PATCH 048/140] [DOCS] Adds data frame API response codes for allow_no_match (#43666) --- .../apis/delete-transform.asciidoc | 2 +- .../apis/get-transform-stats.asciidoc | 44 ++++++++++++++----- .../data-frames/apis/get-transform.asciidoc | 42 ++++++++++++++---- .../apis/preview-transform.asciidoc | 2 +- .../data-frames/apis/put-transform.asciidoc | 5 +-- .../data-frames/apis/start-transform.asciidoc | 2 +- .../data-frames/apis/stop-transform.asciidoc | 30 +++++++++++-- docs/reference/ml/apis/close-job.asciidoc | 2 - 8 files changed, 98 insertions(+), 31 deletions(-) diff --git a/docs/reference/data-frames/apis/delete-transform.asciidoc b/docs/reference/data-frames/apis/delete-transform.asciidoc index 23c70d914f08f..d772bc3c15d89 100644 --- a/docs/reference/data-frames/apis/delete-transform.asciidoc +++ b/docs/reference/data-frames/apis/delete-transform.asciidoc @@ -22,7 +22,7 @@ Deletes an existing {dataframe-transform}. [[delete-data-frame-transform-prereqs]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_admin` role has these privileges. For more information, see {stack-ov}/security-privileges.html[Security privileges] and diff --git a/docs/reference/data-frames/apis/get-transform-stats.asciidoc b/docs/reference/data-frames/apis/get-transform-stats.asciidoc index 5751c8a3ea7bc..889a109b8a376 100644 --- a/docs/reference/data-frames/apis/get-transform-stats.asciidoc +++ b/docs/reference/data-frames/apis/get-transform-stats.asciidoc @@ -31,15 +31,21 @@ Retrieves usage information for {dataframe-transforms}. [[get-data-frame-transform-stats-prereqs]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `monitor_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_user` role has these privileges. For more information, see {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. -//[discrete] -//[[get-data-frame-transform-stats-desc]] -//===== {api-description-title} +[discrete] +[[get-data-frame-transform-stats-desc]] +==== {api-description-title} + +You can get statistics for multiple {dataframe-transforms} in a single API +request by using a comma-separated list of identifiers or a wildcard expression. +You can get statistics for all {dataframe-transforms} by using `_all`, by +specifying `*` as the ``, or by omitting the +``. [discrete] [[get-data-frame-transform-stats-path-parms]] @@ -56,17 +62,26 @@ see {stack-ov}/security-privileges.html[Security privileges] and ==== {api-query-parms-title} `allow_no_match` (Optional):: - (boolean) Whether to ignore if a wildcard expression matches no - {dataframe-transforms}. This includes `_all` string or when no transforms have - been specified. The default is `true`. + (boolean) Specifies what to do when the request: ++ +-- +* Contains wildcard expressions and there are no {dataframe-transforms} that match. +* Contains the `_all` string or no identifiers and there are no matches. 
+* Contains wildcard expressions and there are only partial matches. + +The default value is `true`, which returns an empty `transforms` array when +there are no matches and the subset of results when there are partial matches. +If this parameter is `false`, the request returns a `404` status code when there +are no matches or only partial matches. +-- `from` (Optional):: - (integer) Skips the specified number of {dataframe-transforms}. The - default value is `0`. + (integer) Skips the specified number of {dataframe-transforms}. The + default value is `0`. `size` (Optional):: - (integer) Specifies the maximum number of {dataframe-transforms} to obtain. - The default value is `100`. + (integer) Specifies the maximum number of {dataframe-transforms} to obtain. + The default value is `100`. [discrete] [[get-data-frame-transform-stats-response]] @@ -75,6 +90,13 @@ see {stack-ov}/security-privileges.html[Security privileges] and `transforms`:: (array) An array of statistics objects for {dataframe-transforms}, which are sorted by the `id` value in ascending order. + +[[get-data-frame-transform-stats-response-codes]] +==== {api-response-codes-title} + +`404` (Missing resources):: + If `allow_no_match` is `false`, this code indicates that there are no + resources that match the request or only partial matches for the request. [discrete] [[get-data-frame-transform-stats-example]] diff --git a/docs/reference/data-frames/apis/get-transform.asciidoc b/docs/reference/data-frames/apis/get-transform.asciidoc index 847d764c01267..bf7901c191e33 100644 --- a/docs/reference/data-frames/apis/get-transform.asciidoc +++ b/docs/reference/data-frames/apis/get-transform.asciidoc @@ -30,12 +30,22 @@ Retrieves configuration information for {dataframe-transforms}. [[get-data-frame-transform-prereqs]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `monitor_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_user` role has these privileges. For more information, see {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. +[discrete] +[[get-data-frame-transform-desc]] +==== {api-description-title} + +You can get information for multiple {dataframe-transforms} in a single API +request by using a comma-separated list of identifiers or a wildcard expression. +You can get information for all {dataframe-transforms} by using `_all`, by +specifying `*` as the ``, or by omitting the +``. + [discrete] [[get-data-frame-transform-path-parms]] ==== {api-path-parms-title} @@ -51,17 +61,26 @@ see {stack-ov}/security-privileges.html[Security privileges] and ==== {api-query-parms-title} `allow_no_match` (Optional):: - (boolean) Whether to ignore if a wildcard expression matches no - {dataframe-transforms}. This includes `_all` string or when no transforms have - been specified. The default is `true`. +(boolean) Specifies what to do when the request: ++ +-- +* Contains wildcard expressions and there are no {dataframe-transforms} that match. +* Contains the `_all` string or no identifiers and there are no matches. +* Contains wildcard expressions and there are only partial matches. + +The default value is `true`, which returns an empty `transforms` array when +there are no matches and the subset of results when there are partial matches. 
+If this parameter is `false`, the request returns a `404` status code when there +are no matches or only partial matches. +-- `from` (Optional):: - (integer) Skips the specified number of {dataframe-transforms}. The - default value is `0`. + (integer) Skips the specified number of {dataframe-transforms}. The + default value is `0`. `size` (Optional):: - (integer) Specifies the maximum number of {dataframe-transforms} to obtain. - The default value is `100`. + (integer) Specifies the maximum number of {dataframe-transforms} to obtain. + The default value is `100`. [discrete] [[get-data-frame-transform-response]] @@ -70,6 +89,13 @@ see {stack-ov}/security-privileges.html[Security privileges] and `transforms`:: (array) An array of transform resources, which are sorted by the `id` value in ascending order. + +[[get-data-frame-transform-response-codes]] +==== {api-response-codes-title} + +`404` (Missing resources):: + If `allow_no_match` is `false`, this code indicates that there are no + resources that match the request or only partial matches for the request. [discrete] [[get-data-frame-transform-example]] diff --git a/docs/reference/data-frames/apis/preview-transform.asciidoc b/docs/reference/data-frames/apis/preview-transform.asciidoc index 4e11fd5eda231..5dfe1f2f1d726 100644 --- a/docs/reference/data-frames/apis/preview-transform.asciidoc +++ b/docs/reference/data-frames/apis/preview-transform.asciidoc @@ -22,7 +22,7 @@ Previews a {dataframe-transform}. [[preview-data-frame-transform-prereq]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_admin` role has these privileges. You must also have `read` and `view_index_metadata` privileges on the source index for the diff --git a/docs/reference/data-frames/apis/put-transform.asciidoc b/docs/reference/data-frames/apis/put-transform.asciidoc index 3c6a5251bffd8..6910cb85a25f5 100644 --- a/docs/reference/data-frames/apis/put-transform.asciidoc +++ b/docs/reference/data-frames/apis/put-transform.asciidoc @@ -22,7 +22,7 @@ Instantiates a {dataframe-transform}. [[put-data-frame-transform-prereqs]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_admin` role has these privileges. You must also have `read` and `view_index_metadata` privileges on the source index and `read`, @@ -30,10 +30,9 @@ have `read` and `view_index_metadata` privileges on the source index and `read`, information, see {stack-ov}/security-privileges.html[Security privileges] and {stack-ov}/built-in-roles.html[Built-in roles]. - [discrete] [[put-data-frame-transform-desc]] -===== {api-description-title} +==== {api-description-title} IMPORTANT: You must use {kib} or this API to create a {dataframe-transform}. Do not put a {dataframe-transform} directly into any diff --git a/docs/reference/data-frames/apis/start-transform.asciidoc b/docs/reference/data-frames/apis/start-transform.asciidoc index e7ae0353f5ca6..b76bcb0dd4796 100644 --- a/docs/reference/data-frames/apis/start-transform.asciidoc +++ b/docs/reference/data-frames/apis/start-transform.asciidoc @@ -22,7 +22,7 @@ Starts one or more {dataframe-transforms}. 
[[start-data-frame-transform-prereqs]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. You must also have `view_index_metadata` privileges on the source index for the {dataframe-transform}. For more information, see diff --git a/docs/reference/data-frames/apis/stop-transform.asciidoc b/docs/reference/data-frames/apis/stop-transform.asciidoc index 9a08aaf0a9b4d..80c2654babe0d 100644 --- a/docs/reference/data-frames/apis/stop-transform.asciidoc +++ b/docs/reference/data-frames/apis/stop-transform.asciidoc @@ -26,7 +26,7 @@ Stops one or more {dataframe-transforms}. [[stop-data-frame-transform-prereq]] ==== {api-prereq-title} -If the {es} {security-features} are enabled, you must have +* If the {es} {security-features} are enabled, you must have `manage_data_frame_transforms` cluster privileges to use this API. The built-in `data_frame_transforms_admin` role has these privileges. For more information, see {stack-ov}/security-privileges.html[Security privileges] and @@ -55,9 +55,23 @@ All {dataframe-transforms} can be stopped by using `_all` or `*` as the ==== {api-query-parms-title} `allow_no_match` (Optional):: - (boolean) Whether to ignore if a wildcard expression matches no - {dataframe-transforms}. This includes `_all` string or when no transforms have - been specified. The default is `true`. +(boolean) Specifies what to do when the request: ++ +-- +* Contains wildcard expressions and there are no {dataframe-transforms} that match. +* Contains the `_all` string or no identifiers and there are no matches. +* Contains wildcard expressions and there are only partial matches. + +The default value is `true`, which returns a successful acknowledgement message +when there are no matches. When there are only partial matches, the API stops +the appropriate {dataframe-transforms}. For example, if the request contains +`test-id1*,test-id2*` as the identifiers and there are no {dataframe-transforms} +that match `test-id2*`, the API nonetheless stops the {dataframe-transforms} +that match `test-id1*`. + +If this parameter is `false`, the request returns a `404` status code when there +are no matches or only partial matches. +-- `timeout` (Optional):: (time value) If `wait_for_completion=true`, the API blocks for (at maximum) @@ -72,6 +86,14 @@ All {dataframe-transforms} can be stopped by using `_all` or `*` as the completely stops. If set to `false`, the API returns immediately and the indexer will be stopped asynchronously in the background. Defaults to `false`. +[discrete] +[[stop-data-frame-transform-response-codes]] +==== {api-response-codes-title} + +`404` (Missing resources):: + If `allow_no_match` is `false`, this code indicates that there are no + resources that match the request or only partial matches for the request. + [discrete] [[stop-data-frame-transform-example]] ==== {api-examples-title} diff --git a/docs/reference/ml/apis/close-job.asciidoc b/docs/reference/ml/apis/close-job.asciidoc index 8eb78cff0064e..2a38648e48fe5 100644 --- a/docs/reference/ml/apis/close-job.asciidoc +++ b/docs/reference/ml/apis/close-job.asciidoc @@ -12,7 +12,6 @@ A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. 
-[discrete]
 [[ml-close-job-request]]
 ==== {api-request-title}

@@ -55,7 +54,6 @@ after the close job API returns. The `force` query parameter should only be use
 situations where the job has already failed, or where you are not interested in
 results the job might have recently produced or might produce in the future.

-[discrete]
 [[ml-close-job-path-parms]]
 ==== {api-path-parms-title}

From 6f5b3a6c71f498263b98b93614c98c2e6a73817f Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Thu, 27 Jun 2019 18:27:16 -0400
Subject: [PATCH 049/140] Do not use MockInternalEngine in GatewayIndexStateIT
 (#43716)

GatewayIndexStateIT#testRecoverBrokenIndexMetadata relies on flushing on
shutdown. This behaviour, however, can be randomly disabled in
MockInternalEngine.

Closes #43034
---
 .../main/java/org/elasticsearch/index/IndexService.java  | 2 --
 .../main/java/org/elasticsearch/index/engine/Engine.java | 2 --
 .../org/elasticsearch/gateway/GatewayIndexStateIT.java   | 9 ++++++---
 .../memory/breaker/RandomExceptionCircuitBreakerIT.java  | 8 ++------
 .../search/basic/SearchWithRandomExceptionsIT.java       | 8 ++------
 .../java/org/elasticsearch/test/ESIntegTestCase.java     | 9 ++++++++-
 .../elasticsearch/snapshots/SourceOnlySnapshotIT.java    | 7 ++-----
 7 files changed, 20 insertions(+), 25 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java
index ddfb23f60e121..7c3dc0fe497be 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexService.java
@@ -471,8 +471,6 @@ private void closeShard(String reason, ShardId sId, IndexShard indexShard, Store
             try {
                 // only flush we are we closed (closed index or shutdown) and if we are not deleted
                 final boolean flushEngine = deleted.get() == false && closed.get();
-                logger.trace("[{}] closing shard (flushEngine: {}, deleted: {}, closed: {})", shardId, flushEngine, deleted.get(),
-                    closed.get());
                 indexShard.close(reason, flushEngine);
             } catch (Exception e) {
                 logger.debug(() -> new ParameterizedMessage("[{}] failed to close index shard", shardId), e);
diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
index a50d0c790d4d9..e21b816aefd80 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -1716,8 +1716,6 @@ public void flushAndClose() throws IOException {
                     close(); // double close is not a problem
                 }
             }
-        } else {
-            logger.trace("skipping flushAndClose as already closed");
         }
         awaitPendingClose();
     }
diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
index 56bbccf134771..bfc45b3118800 100644
--- a/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
+++ b/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
@@ -52,7 +52,6 @@
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
 import org.elasticsearch.test.InternalTestCluster.RestartCallback;
-import org.elasticsearch.test.junit.annotations.TestLogging;

 import java.io.IOException;
 import java.util.List;
@@ -76,6 +75,12 @@ public class GatewayIndexStateIT extends ESIntegTestCase {

     private final Logger logger = LogManager.getLogger(GatewayIndexStateIT.class);

+    @Override
+    protected boolean addMockInternalEngine() {
+        // testRecoverBrokenIndexMetadata relies on the flush-on-shutdown behavior, which can be randomly disabled in MockInternalEngine.
+        return false;
+    }
+
     public void testMappingMetaDataParsed() throws Exception {
         logger.info("--> starting 1 nodes");
         internalCluster().startNode();
@@ -346,8 +351,6 @@ public Settings onNodeStopped(final String nodeName) throws Exception {
      * allocated in our metadata that we recover. In that case we now have the ability to check the index on local recovery from disk
      * if it is sane and if we can successfully create an IndexService. This also includes plugins etc.
      */
-    // temporarily enabling TRACE to aid debugging https://github.com/elastic/elasticsearch/issues/43034
-    @TestLogging("_root:TRACE")
     public void testRecoverBrokenIndexMetadata() throws Exception {
         logger.info("--> starting one node");
         internalCluster().startNode();
diff --git a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index f379b7ee5229a..b86b622705e1e 100644
--- a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -51,10 +51,8 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
-import java.util.Set;
 import java.util.concurrent.ExecutionException;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
@@ -70,10 +68,8 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
     }

     @Override
-    protected Collection<Class<? extends Plugin>> getMockPlugins() {
-        Set<Class<? extends Plugin>> mocks = new HashSet<>(super.getMockPlugins());
-        mocks.remove(MockEngineFactoryPlugin.class);
-        return mocks;
+    protected boolean addMockInternalEngine() {
+        return false;
     }

     public void testBreakerWithRandomExceptions() throws IOException, InterruptedException, ExecutionException {
diff --git a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
index 891e64f52372e..3d196d7a0d98e 100644
--- a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java
@@ -49,10 +49,8 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
-import java.util.Set;
 import java.util.concurrent.ExecutionException;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -65,10 +63,8 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
     }

     @Override
-    protected Collection<Class<? extends Plugin>> getMockPlugins() {
-        Set<Class<? extends Plugin>> mocks = new HashSet<>(super.getMockPlugins());
-        mocks.remove(MockEngineFactoryPlugin.class);
-        return mocks;
+    protected boolean addMockInternalEngine() {
+        return false;
     }

     public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index da82e857d766d..81be0e26cffce 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -1891,6 +1891,13 @@ protected boolean addMockHttpTransport() {
        return true;
    }

+   /**
+    * Returns {@code true} if this test cluster can use a mock internal engine. Defaults to true.
+    */
+   protected boolean addMockInternalEngine() {
+       return true;
+   }
+
    /**
     * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful
     * for debugging or request / response pre and post processing. It also allows to intercept all calls done by the test
@@ -1913,7 +1920,7 @@ protected Collection<Class<? extends Plugin>> getMockPlugins() {
        if (randomBoolean()) {
            mocks.add(NodeMocksPlugin.class);
        }
-       if (randomBoolean()) {
+       if (addMockInternalEngine() && randomBoolean()) {
            mocks.add(MockEngineFactoryPlugin.class);
        }
        if (randomBoolean()) {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotIT.java
index 81be978d33103..a54c57aceb3a6 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotIT.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotIT.java
@@ -24,7 +24,6 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
@@ -65,10 +64,8 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
    }

    @Override
-   protected Collection<Class<? extends Plugin>> getMockPlugins() {
-       Collection<Class<? extends Plugin>> classes = new ArrayList<>(super.getMockPlugins());
-       classes.remove(MockEngineFactoryPlugin.class);
-       return classes;
+   protected boolean addMockInternalEngine() {
+       return false;
    }

    public static final class MyPlugin extends Plugin implements RepositoryPlugin, EnginePlugin {

From 5566cd59d99bcaed14d2f246c7451308e8681dd9 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Thu, 27 Jun 2019 17:28:11 -0500
Subject: [PATCH 050/140] [ML] Allowing stopped status in HLRC testStartStop (#43710)

---
 .../elasticsearch/client/DataFrameTransformIT.java | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java
index 45c3038b662a8..5ec2265d0451e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java
@@ -258,8 +258,10 @@ public void testStartStop() throws IOException {
        GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
            client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
        assertThat(statsResponse.getTransformsStateAndStats(), hasSize(1));
-       IndexerState indexerState = statsResponse.getTransformsStateAndStats().get(0).getTransformState().getIndexerState();
-       assertThat(indexerState, is(oneOf(IndexerState.STARTED, IndexerState.INDEXING)));
+       DataFrameTransformTaskState taskState = statsResponse.getTransformsStateAndStats().get(0).getTransformState().getTaskState();
+
+       // Since we are non-continuous, the transform could auto-stop between being started earlier and us gathering the statistics
+       assertThat(taskState, is(oneOf(DataFrameTransformTaskState.STARTED, DataFrameTransformTaskState.STOPPED)));

        StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null);
        StopDataFrameTransformResponse stopResponse =
@@ -267,6 +269,12 @@ public void testStartStop() throws IOException {
        assertTrue(stopResponse.isAcknowledged());
        assertThat(stopResponse.getNodeFailures(), empty());
        assertThat(stopResponse.getTaskFailures(), empty());
+
+       // Calling stop with wait_for_completion ensures that we will be in the `STOPPED` state for the transform task
+       statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
+           client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
+       taskState = statsResponse.getTransformsStateAndStats().get(0).getTransformState().getTaskState();
+       assertThat(taskState, is(DataFrameTransformTaskState.STOPPED));
    }

    public void testPreview() throws IOException {

From 97051649c2980f9163eea50f3fc16c6928b5e601 Mon Sep 17 00:00:00 2001
From: Dimitris Athanasiou
Date: Fri, 28 Jun 2019 10:30:19 +0300
Subject: [PATCH 051/140] [FEATURE][ML] Support multiple source indices for df-analytics (#43702)

This commit adds support for multiple source indices. In order to deal with
multiple indices having different mappings, it attempts a best-effort approach
to merge the mappings, assuming there are no conflicts. In case conflicts
exist, an error will be returned.

To allow users to create custom mappings for special use cases, the
destination index is now allowed to exist before the analytics job runs. In
addition, settings are no longer copied except for the
`index.number_of_shards` and `index.number_of_replicas`.
---
 .../dataframe/DataFrameAnalyticsSource.java | 23 ++-
 .../MlClientDocumentationIT.java | 16 +-
 .../DataFrameAnalyticsSourceTests.java | 2 +-
 .../dataframe/DataFrameAnalyticsSource.java | 31 ++--
 .../xpack/core/ml/job/messages/Messages.java | 3 +-
 .../DataFrameAnalyticsSourceTests.java | 2 +-
 .../ml/qa/ml-with-security/build.gradle | 3 +-
 .../integration/RunDataFrameAnalyticsIT.java | 115 ++++++++++++-
 .../xpack/ml/MachineLearning.java | 2 +-
 ...ransportStartDataFrameAnalyticsAction.java | 27 ++-
 .../ml/dataframe/DataFrameAnalyticsIndex.java | 156 +++++++++++++-----
 .../dataframe/DataFrameAnalyticsManager.java | 37 ++++-
 .../xpack/ml/dataframe/MappingsMerger.java | 100 +++++++++++
 .../ml/dataframe/SourceDestValidator.java | 11 +-
 .../DataFrameDataExtractorFactory.java | 8 +-
 .../extractor/ExtractedFieldsDetector.java | 29 ++--
 .../DataFrameAnalyticsIndexTests.java | 135 +++++++++------
 .../ml/dataframe/MappingsMergerTests.java | 137 +++++++++++++++
 .../dataframe/SourceDestValidatorTests.java | 20 ++-
 .../ExtractedFieldsDetectorTests.java | 6 +-
 .../test/ml/data_frame_analytics_crud.yml | 32 +++-
 21 files changed, 713 insertions(+), 182 deletions(-)
 create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java
 create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/MappingsMergerTests.java

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java
index c36799cd3b4a7..9a6de159bea3e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java
+++
b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import java.util.Objects; public class DataFrameAnalyticsSource implements ToXContentObject { @@ -46,19 +48,19 @@ public static Builder builder() { private static ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_source", true, Builder::new); static { - PARSER.declareString(Builder::setIndex, INDEX); + PARSER.declareStringArray(Builder::setIndex, INDEX); PARSER.declareObject(Builder::setQueryConfig, (p, c) -> QueryConfig.fromXContent(p), QUERY); } - private final String index; + private final String[] index; private final QueryConfig queryConfig; - private DataFrameAnalyticsSource(String index, @Nullable QueryConfig queryConfig) { + private DataFrameAnalyticsSource(String[] index, @Nullable QueryConfig queryConfig) { this.index = Objects.requireNonNull(index); this.queryConfig = queryConfig; } - public String getIndex() { + public String[] getIndex() { return index; } @@ -83,13 +85,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o; - return Objects.equals(index, other.index) + return Arrays.equals(index, other.index) && Objects.equals(queryConfig, other.queryConfig); } @Override public int hashCode() { - return Objects.hash(index, queryConfig); + return Objects.hash(Arrays.asList(index), queryConfig); } @Override @@ -99,16 +101,21 @@ public String toString() { public static class Builder { - private String index; + private String[] index; private QueryConfig queryConfig; private Builder() {} - public Builder setIndex(String index) { + public Builder setIndex(String... 
index) { this.index = index; return this; } + public Builder setIndex(List index) { + this.index = index.toArray(new String[0]); + return this; + } + public Builder setQueryConfig(QueryConfig queryConfig) { this.queryConfig = queryConfig; return this; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 8fd63a065d944..e7cbaa743b544 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -2802,7 +2802,7 @@ public void onFailure(Exception e) { } public void testGetDataFrameAnalytics() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); @@ -2851,7 +2851,7 @@ public void onFailure(Exception e) { } public void testGetDataFrameAnalyticsStats() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); @@ -2901,7 +2901,7 @@ public void onFailure(Exception e) { } public void testPutDataFrameAnalytics() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); { @@ -2994,7 +2994,7 @@ public void onFailure(Exception e) { } public void testDeleteDataFrameAnalytics() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); @@ -3044,9 +3044,9 @@ public void onFailure(Exception e) { } public void testStartDataFrameAnalytics() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); highLevelClient().index( - new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()).source(XContentType.JSON, "total", 10000) + new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); @@ -3101,9 +3101,9 @@ public void onFailure(Exception e) { } public void testStopDataFrameAnalytics() throws Exception { - createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()); + createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); highLevelClient().index( - new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()).source(XContentType.JSON, "total", 10000) + new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000) 
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSourceTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSourceTests.java index 246cd67c1baf1..d82e1999f3034 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSourceTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSourceTests.java @@ -36,7 +36,7 @@ public class DataFrameAnalyticsSourceTests extends AbstractXContentTestCase createParser(boolean ignoreUnknownFields) { ConstructingObjectParser parser = new ConstructingObjectParser<>("data_frame_analytics_source", - ignoreUnknownFields, a -> new DataFrameAnalyticsSource((String) a[0], (QueryProvider) a[1])); - parser.declareString(ConstructingObjectParser.constructorArg(), INDEX); + ignoreUnknownFields, a -> new DataFrameAnalyticsSource(((List) a[0]).toArray(new String[0]), (QueryProvider) a[1])); + parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDEX); parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> QueryProvider.fromXContent(p, ignoreUnknownFields, Messages.DATA_FRAME_ANALYTICS_BAD_QUERY_FORMAT), QUERY); return parser; } - private final String index; + private final String[] index; private final QueryProvider queryProvider; - public DataFrameAnalyticsSource(String index, @Nullable QueryProvider queryProvider) { + public DataFrameAnalyticsSource(String[] index, @Nullable QueryProvider queryProvider) { this.index = ExceptionsHelper.requireNonNull(index, INDEX); - if (index.isEmpty()) { - throw ExceptionsHelper.badRequestException("[{}] must be non-empty", INDEX); + if (index.length == 0) { + throw new IllegalArgumentException("source.index must specify at least one index"); + } + if (Arrays.stream(index).anyMatch(Strings::isNullOrEmpty)) { + throw new IllegalArgumentException("source.index must contain non-null and non-empty strings"); } this.queryProvider = queryProvider == null ? 
QueryProvider.defaultQuery() : queryProvider; } public DataFrameAnalyticsSource(StreamInput in) throws IOException { - index = in.readString(); + index = in.readStringArray(); queryProvider = QueryProvider.fromStream(in); } public DataFrameAnalyticsSource(DataFrameAnalyticsSource other) { - this.index = other.index; + this.index = Arrays.copyOf(other.index, other.index.length); this.queryProvider = new QueryProvider(other.queryProvider); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); + out.writeStringArray(index); queryProvider.writeTo(out); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(INDEX.getPreferredName(), index); + builder.array(INDEX.getPreferredName(), index); builder.field(QUERY.getPreferredName(), queryProvider.getQuery()); builder.endObject(); return builder; @@ -83,16 +88,16 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o; - return Objects.equals(index, other.index) + return Arrays.equals(index, other.index) && Objects.equals(queryProvider, other.queryProvider); } @Override public int hashCode() { - return Objects.hash(index, queryProvider); + return Objects.hash(Arrays.asList(index), queryProvider); } - public String getIndex() { + public String[] getIndex() { return index; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 417184f8a752b..dfb95d2adac33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -51,8 +51,7 @@ public final class Messages { public static final String DATAFEED_ID_ALREADY_TAKEN = "A datafeed with id [{0}] already exists"; public static final String DATA_FRAME_ANALYTICS_BAD_QUERY_FORMAT = "Data Frame Analytics config query is not parsable"; - public static final String DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER = - "No compatible fields could be detected in index [{0}] with name [{1}]"; + public static final String DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER = "No field [{0}] could be detected"; public static final String FILTER_CANNOT_DELETE = "Cannot delete filter [{0}] currently used by jobs {1}"; public static final String FILTER_CONTAINS_TOO_MANY_ITEMS = "Filter [{0}] contains too many items; up to [{1}] items are allowed"; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSourceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSourceTests.java index 8c42dfb7a4cb7..36c4774baa465 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSourceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSourceTests.java @@ -44,7 +44,7 @@ protected DataFrameAnalyticsSource createTestInstance() { } public static DataFrameAnalyticsSource createRandom() { - String index = randomAlphaOfLength(10); + String[] index = generateRandomStringArray(10, 10, false, false); QueryProvider queryProvider = null; if (randomBoolean()) { try { diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle 
b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 686f6ed3fbec5..6077b8ab099f6 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -56,7 +56,8 @@ integTest.runner { 'ml/data_frame_analytics_crud/Test put config with unknown top level field', 'ml/data_frame_analytics_crud/Test put config with unknown field in outlier detection analysis', 'ml/data_frame_analytics_crud/Test put config given missing source', - 'ml/data_frame_analytics_crud/Test put config given source with empty index', + 'ml/data_frame_analytics_crud/Test put config given source with empty index array', + 'ml/data_frame_analytics_crud/Test put config given source with empty string in index array', 'ml/data_frame_analytics_crud/Test put config given source without index', 'ml/data_frame_analytics_crud/Test put config given missing dest', 'ml/data_frame_analytics_crud/Test put config given dest with empty index', diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index e10a9ccf01b6a..5696edcf6460c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -69,7 +69,7 @@ public void testOutlierDetectionWithFewDocuments() throws Exception { } String id = "test_outlier_detection_with_few_docs"; - DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, sourceIndex, null); + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, new String[] {sourceIndex}, sourceIndex + "-results", null); registerAnalytics(config); putAnalytics(config); @@ -130,7 +130,8 @@ public void testOutlierDetectionWithEnoughDocumentsToScroll() throws Exception { } String id = "test_outlier_detection_with_enough_docs_to_scroll"; - DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, sourceIndex, "custom_ml"); + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics( + id, new String[] {sourceIndex}, sourceIndex + "-results", "custom_ml"); registerAnalytics(config); putAnalytics(config); @@ -188,7 +189,7 @@ public void testOutlierDetectionWithMoreFieldsThanDocValueFieldLimit() throws Ex } String id = "test_outlier_detection_with_more_fields_than_docvalue_limit"; - DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, sourceIndex, null); + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, new String[] {sourceIndex}, sourceIndex + "-results", null); registerAnalytics(config); putAnalytics(config); @@ -216,7 +217,7 @@ public void testOutlierDetectionWithMoreFieldsThanDocValueFieldLimit() throws Ex } public void testStopOutlierDetectionWithEnoughDocumentsToScroll() { - String sourceIndex = "test-outlier-detection-with-enough-docs-to-scroll"; + String sourceIndex = "test-stop-outlier-detection-with-enough-docs-to-scroll"; client().admin().indices().prepareCreate(sourceIndex) .addMapping("_doc", "numeric_1", "type=double", "numeric_2", "type=float", "categorical_1", "type=keyword") @@ -236,8 +237,9 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() { fail("Failed to index data: " + bulkResponse.buildFailureMessage()); } - String id = 
"test_outlier_detection_with_enough_docs_to_scroll"; - DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, sourceIndex, "custom_ml"); + String id = "test_stop_outlier_detection_with_enough_docs_to_scroll"; + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics( + id, new String[] {sourceIndex}, sourceIndex + "-results", "custom_ml"); registerAnalytics(config); putAnalytics(config); @@ -264,10 +266,107 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() { } } - private static DataFrameAnalyticsConfig buildOutlierDetectionAnalytics(String id, String sourceIndex, @Nullable String resultsField) { + public void testOutlierDetectionWithMultipleSourceIndices() throws Exception { + String sourceIndex1 = "test-outlier-detection-with-multiple-source-indices-1"; + String sourceIndex2 = "test-outlier-detection-with-multiple-source-indices-2"; + String destIndex = "test-outlier-detection-with-multiple-source-indices-results"; + String[] sourceIndex = new String[] { sourceIndex1, sourceIndex2 }; + + client().admin().indices().prepareCreate(sourceIndex1) + .addMapping("_doc", "numeric_1", "type=double", "numeric_2", "type=float", "categorical_1", "type=keyword") + .get(); + + client().admin().indices().prepareCreate(sourceIndex2) + .addMapping("_doc", "numeric_1", "type=double", "numeric_2", "type=float", "categorical_1", "type=keyword") + .get(); + + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + for (String index : sourceIndex) { + for (int i = 0; i < 5; i++) { + IndexRequest indexRequest = new IndexRequest(index); + indexRequest.source("numeric_1", randomDouble(), "numeric_2", randomFloat(), "categorical_1", "foo_" + i); + bulkRequestBuilder.add(indexRequest); + } + } + BulkResponse bulkResponse = bulkRequestBuilder.get(); + if (bulkResponse.hasFailures()) { + fail("Failed to index data: " + bulkResponse.buildFailureMessage()); + } + + String id = "test_outlier_detection_with_multiple_source_indices"; + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, sourceIndex, destIndex, null); + registerAnalytics(config); + putAnalytics(config); + + assertState(id, DataFrameAnalyticsState.STOPPED); + + startAnalytics(id); + waitUntilAnalyticsIsStopped(id); + + // Check we've got all docs + SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); + + // Check they all have an outlier_score + searchResponse = client().prepareSearch(config.getDest().getIndex()) + .setTrackTotalHits(true) + .setQuery(QueryBuilders.existsQuery("ml.outlier_score")).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); + } + + public void testOutlierDetectionWithPreExistingDestIndex() throws Exception { + String sourceIndex = "test-outlier-detection-with-pre-existing-dest-index"; + String destIndex = "test-outlier-detection-with-pre-existing-dest-index-results"; + + client().admin().indices().prepareCreate(sourceIndex) + .addMapping("_doc", "numeric_1", "type=double", "numeric_2", "type=float", "categorical_1", "type=keyword") + .get(); + + client().admin().indices().prepareCreate(destIndex) + .addMapping("_doc", "numeric_1", "type=double", "numeric_2", "type=float", "categorical_1", "type=keyword") + .get(); + + BulkRequestBuilder 
bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + for (int i = 0; i < 5; i++) { + IndexRequest indexRequest = new IndexRequest(sourceIndex); + indexRequest.source("numeric_1", randomDouble(), "numeric_2", randomFloat(), "categorical_1", "foo_" + i); + bulkRequestBuilder.add(indexRequest); + } + BulkResponse bulkResponse = bulkRequestBuilder.get(); + if (bulkResponse.hasFailures()) { + fail("Failed to index data: " + bulkResponse.buildFailureMessage()); + } + + String id = "test_outlier_detection_with_pre_existing_dest_index"; + DataFrameAnalyticsConfig config = buildOutlierDetectionAnalytics(id, new String[] {sourceIndex}, destIndex, null); + registerAnalytics(config); + putAnalytics(config); + + assertState(id, DataFrameAnalyticsState.STOPPED); + + startAnalytics(id); + waitUntilAnalyticsIsStopped(id); + + // Check we've got all docs + SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); + + // Check they all have an outlier_score + searchResponse = client().prepareSearch(config.getDest().getIndex()) + .setTrackTotalHits(true) + .setQuery(QueryBuilders.existsQuery("ml.outlier_score")).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); + } + + private static DataFrameAnalyticsConfig buildOutlierDetectionAnalytics(String id, String[] sourceIndex, String destIndex, + @Nullable String resultsField) { DataFrameAnalyticsConfig.Builder configBuilder = new DataFrameAnalyticsConfig.Builder(id); configBuilder.setSource(new DataFrameAnalyticsSource(sourceIndex, null)); - configBuilder.setDest(new DataFrameAnalyticsDest(sourceIndex + "-results", resultsField)); + configBuilder.setDest(new DataFrameAnalyticsDest(destIndex, resultsField)); configBuilder.setAnalysis(new OutlierDetection()); return configBuilder.build(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index cf73ef95a2639..caf5d0c17d2cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -519,7 +519,7 @@ public Collection createComponents(Client client, ClusterService cluster AnalyticsProcessManager analyticsProcessManager = new AnalyticsProcessManager(client, threadPool, analyticsProcessFactory); DataFrameAnalyticsConfigProvider dataFrameAnalyticsConfigProvider = new DataFrameAnalyticsConfigProvider(client); assert client instanceof NodeClient; - DataFrameAnalyticsManager dataFrameAnalyticsManager = new DataFrameAnalyticsManager(clusterService, (NodeClient) client, + DataFrameAnalyticsManager dataFrameAnalyticsManager = new DataFrameAnalyticsManager((NodeClient) client, dataFrameAnalyticsConfigProvider, analyticsProcessManager); this.dataFrameAnalyticsManager.set(dataFrameAnalyticsManager); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index d3e643b27e220..5665bd0181210 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; +import org.elasticsearch.xpack.ml.dataframe.MappingsMerger; import org.elasticsearch.xpack.ml.dataframe.SourceDestValidator; import org.elasticsearch.xpack.ml.dataframe.extractor.DataFrameDataExtractorFactory; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; @@ -158,23 +159,35 @@ public void onFailure(Exception e) { ); // Tell the job tracker to refresh the memory requirement for this job and all other jobs that have persistent tasks - ActionListener validateListener = ActionListener.wrap( + ActionListener configListener = ActionListener.wrap( config -> memoryTracker.addDataFrameAnalyticsJobMemoryAndRefreshAllOthers( request.getId(), config.getModelMemoryLimit().getBytes(), memoryRequirementRefreshListener), listener::onFailure ); - // Validate config - ActionListener configListener = ActionListener.wrap( + // Get config + getConfigAndValidate(request.getId(), configListener); + } + + private void getConfigAndValidate(String id, ActionListener finalListener) { + // Validate mappings can be merged + ActionListener firstValidationListener = ActionListener.wrap( + config -> MappingsMerger.mergeMappings(client, config.getHeaders(), config.getSource().getIndex(), ActionListener.wrap( + mappings -> finalListener.onResponse(config), finalListener::onFailure)), + finalListener::onFailure + ); + + // Validate source and dest; check data extraction is possible + ActionListener getConfigListener = ActionListener.wrap( config -> { new SourceDestValidator(clusterService.state(), indexNameExpressionResolver).check(config); - DataFrameDataExtractorFactory.validateConfigAndSourceIndex(client, config, validateListener); + DataFrameDataExtractorFactory.validateConfigAndSourceIndex(client, config, firstValidationListener); }, - listener::onFailure + finalListener::onFailure ); - // Get config - configProvider.get(request.getId(), configListener); + // First, get the config + configProvider.get(id, getConfigListener); } private void waitForAnalyticsStarted(PersistentTasksCustomMetaData.PersistentTask task, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java index 25a2b04e27c56..a682f259358f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java @@ -10,23 +10,34 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; +import 
org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import java.time.Clock; -import java.util.Arrays; import java.util.HashMap; -import java.util.List; +import java.util.Iterator; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + /** * {@link DataFrameAnalyticsIndex} class encapsulates logic for creating destination index based on source index metadata. */ @@ -36,57 +47,103 @@ final class DataFrameAnalyticsIndex { private static final String META = "_meta"; /** - * Unfortunately, getting the settings of an index include internal settings that should - * not be set explicitly. There is no way to filter those out. Thus, we have to maintain - * a list of them and filter them out manually. + * We only preserve the most important settings. + * If the user needs other settings on the destination index they + * should create the destination index before starting the analytics. */ - private static final List INTERNAL_SETTINGS = Arrays.asList( - "index.creation_date", - "index.provided_name", - "index.uuid", - "index.version.created", - "index.version.upgraded" - ); + private static final String[] PRESERVED_SETTINGS = new String[] {"index.number_of_shards", "index.number_of_replicas"}; + + private DataFrameAnalyticsIndex() {} /** * Creates destination index based on source index metadata. 
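     * Only the {@code index.number_of_shards} and {@code index.number_of_replicas} settings are
     * copied over from the source indices (taking the maximum value across them), and the
     * destination index is sorted on {@code DataFrameAnalyticsFields.ID} in ascending order.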
*/ public static void createDestinationIndex(Client client, Clock clock, - ClusterState clusterState, DataFrameAnalyticsConfig analyticsConfig, ActionListener listener) { - String sourceIndex = analyticsConfig.getSource().getIndex(); - Map headers = analyticsConfig.getHeaders(); - IndexMetaData sourceIndexMetaData = clusterState.getMetaData().getIndices().get(sourceIndex); - if (sourceIndexMetaData == null) { - listener.onFailure(new IndexNotFoundException(sourceIndex)); - return; - } - CreateIndexRequest createIndexRequest = - prepareCreateIndexRequest(sourceIndexMetaData, analyticsConfig.getDest().getIndex(), analyticsConfig.getId(), clock); - ClientHelper.executeWithHeadersAsync( - headers, ClientHelper.ML_ORIGIN, client, CreateIndexAction.INSTANCE, createIndexRequest, listener); + ActionListener createIndexRequestListener = ActionListener.wrap( + createIndexRequest -> ClientHelper.executeWithHeadersAsync(analyticsConfig.getHeaders(), ClientHelper.ML_ORIGIN, client, + CreateIndexAction.INSTANCE, createIndexRequest, listener), + listener::onFailure + ); + + prepareCreateIndexRequest(client, clock, analyticsConfig, createIndexRequestListener); } - private static CreateIndexRequest prepareCreateIndexRequest(IndexMetaData sourceIndexMetaData, - String destinationIndex, - String analyticsId, - Clock clock) { - // Settings - Settings.Builder settingsBuilder = Settings.builder().put(sourceIndexMetaData.getSettings()); - INTERNAL_SETTINGS.forEach(settingsBuilder::remove); - settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID); - settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC); - Settings settings = settingsBuilder.build(); + private static void prepareCreateIndexRequest(Client client, Clock clock, DataFrameAnalyticsConfig config, + ActionListener listener) { + AtomicReference settingsHolder = new AtomicReference<>(); + + String[] sourceIndex = config.getSource().getIndex(); + + ActionListener> mappingsListener = ActionListener.wrap( + mappings -> listener.onResponse(createIndexRequest(clock, config, settingsHolder.get(), mappings)), + listener::onFailure + ); + + ActionListener settingsListener = ActionListener.wrap( + settings -> { + settingsHolder.set(settings); + MappingsMerger.mergeMappings(client, config.getHeaders(), sourceIndex, mappingsListener); + }, + listener::onFailure + ); + + ActionListener getSettingsResponseListener = ActionListener.wrap( + settingsResponse -> settingsListener.onResponse(settings(settingsResponse)), + listener::onFailure + ); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest(); + getSettingsRequest.indices(sourceIndex); + getSettingsRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); + getSettingsRequest.names(PRESERVED_SETTINGS); + ClientHelper.executeWithHeadersAsync(config.getHeaders(), ML_ORIGIN, client, GetSettingsAction.INSTANCE, + getSettingsRequest, getSettingsResponseListener); + } + + private static CreateIndexRequest createIndexRequest(Clock clock, DataFrameAnalyticsConfig config, Settings settings, + ImmutableOpenMap mappings) { + // There should only be 1 type + assert mappings.size() == 1; - // Mappings - String singleMappingType = sourceIndexMetaData.getMappings().keysIt().next(); - Map mappingsAsMap = sourceIndexMetaData.getMappings().valuesIt().next().sourceAsMap(); + String destinationIndex = config.getDest().getIndex(); + String type = mappings.keysIt().next(); + Map mappingsAsMap = mappings.valuesIt().next().sourceAsMap(); 
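+        // addProperties registers the DataFrameAnalyticsFields.ID field in the merged mappings and
+        // addMetaData stamps their _meta section with the analytics id and a creation timestamp.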
addProperties(mappingsAsMap); - addMetaData(mappingsAsMap, analyticsId, clock); + addMetaData(mappingsAsMap, config.getId(), clock); + return new CreateIndexRequest(destinationIndex, settings).mapping(type, mappingsAsMap); + } + + private static Settings settings(GetSettingsResponse settingsResponse) { + Integer maxNumberOfShards = findMaxSettingValue(settingsResponse, IndexMetaData.SETTING_NUMBER_OF_SHARDS); + Integer maxNumberOfReplicas = findMaxSettingValue(settingsResponse, IndexMetaData.SETTING_NUMBER_OF_REPLICAS); + + Settings.Builder settingsBuilder = Settings.builder(); + settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID); + settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC); + if (maxNumberOfShards != null) { + settingsBuilder.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, maxNumberOfShards); + } + if (maxNumberOfReplicas != null) { + settingsBuilder.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, maxNumberOfReplicas); + } + return settingsBuilder.build(); + } - return new CreateIndexRequest(destinationIndex, settings).mapping(singleMappingType, mappingsAsMap); + @Nullable + private static Integer findMaxSettingValue(GetSettingsResponse settingsResponse, String settingKey) { + Integer maxValue = null; + Iterator settingsIterator = settingsResponse.getIndexToSettings().valuesIt(); + while (settingsIterator.hasNext()) { + Settings settings = settingsIterator.next(); + Integer indexValue = settings.getAsInt(settingKey, null); + if (indexValue != null) { + maxValue = maxValue == null ? indexValue : Math.max(indexValue, maxValue); + } + } + return maxValue; } private static void addProperties(Map mappingsAsMap) { @@ -111,6 +168,21 @@ private static V getOrPutDefault(Map map, K key, Supplier v return value; } - private DataFrameAnalyticsIndex() {} + public static void updateMappingsToDestIndex(Client client, DataFrameAnalyticsConfig analyticsConfig, GetIndexResponse getIndexResponse, + ActionListener listener) { + // We have validated the destination index should match a single index + assert getIndexResponse.indices().length == 1; + + ImmutableOpenMap mappings = getIndexResponse.getMappings().get(getIndexResponse.indices()[0]); + String type = mappings.keysIt().next(); + + Map addedMappings = Map.of(PROPERTIES, Map.of(DataFrameAnalyticsFields.ID, Map.of("type", "keyword"))); + + PutMappingRequest putMappingRequest = new PutMappingRequest(getIndexResponse.indices()); + putMappingRequest.type(type); + putMappingRequest.source(addedMappings); + ClientHelper.executeWithHeadersAsync(analyticsConfig.getHeaders(), ML_ORIGIN, client, PutMappingAction.INSTANCE, + putMappingRequest, listener); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 764ca08d735b9..c7cfe2b625369 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -5,16 +5,20 @@ */ package org.elasticsearch.xpack.ml.dataframe; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import 
org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexAction; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -40,17 +44,17 @@ public class DataFrameAnalyticsManager { - private final ClusterService clusterService; + private static final Logger LOGGER = LogManager.getLogger(DataFrameAnalyticsManager.class); + /** - * We need a {@link NodeClient} to be get the reindexing task and be able to report progress + * We need a {@link NodeClient} to get the reindexing task and be able to report progress */ private final NodeClient client; private final DataFrameAnalyticsConfigProvider configProvider; private final AnalyticsProcessManager processManager; - public DataFrameAnalyticsManager(ClusterService clusterService, NodeClient client, DataFrameAnalyticsConfigProvider configProvider, + public DataFrameAnalyticsManager(NodeClient client, DataFrameAnalyticsConfigProvider configProvider, AnalyticsProcessManager processManager) { - this.clusterService = Objects.requireNonNull(clusterService); this.client = Objects.requireNonNull(client); this.configProvider = Objects.requireNonNull(configProvider); this.processManager = Objects.requireNonNull(processManager); @@ -77,7 +81,6 @@ public void execute(DataFrameAnalyticsTask task, DataFrameAnalyticsState current break; // The task has fully reindexed the documents and we should continue on with our analyses case ANALYZING: - // TODO apply previously stored model state if applicable startAnalytics(task, config, true); break; // If we are already at REINDEXING, we are not 100% sure if we reindexed ALL the docs. 
@@ -160,7 +163,27 @@ private void reindexDataframeAndStartAnalysis(DataFrameAnalyticsTask task, DataF reindexCompletedListener::onFailure ); - DataFrameAnalyticsIndex.createDestinationIndex(client, Clock.systemUTC(), clusterService.state(), config, copyIndexCreatedListener); + // Create destination index if it does not exist + ActionListener destIndexListener = ActionListener.wrap( + indexResponse -> { + LOGGER.info("[{}] Using existing destination index [{}]", config.getId(), indexResponse.indices()[0]); + DataFrameAnalyticsIndex.updateMappingsToDestIndex(client, config, indexResponse, ActionListener.wrap( + acknowledgedResponse -> copyIndexCreatedListener.onResponse(null), + copyIndexCreatedListener::onFailure + )); + }, + e -> { + if (org.elasticsearch.ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) { + LOGGER.info("[{}] Creating destination index [{}]", config.getId(), config.getDest().getIndex()); + DataFrameAnalyticsIndex.createDestinationIndex(client, Clock.systemUTC(), config, copyIndexCreatedListener); + } else { + copyIndexCreatedListener.onFailure(e); + } + } + ); + + ClientHelper.executeWithHeadersAsync(config.getHeaders(), ML_ORIGIN, client, GetIndexAction.INSTANCE, + new GetIndexRequest().indices(config.getDest().getIndex()), destIndexListener); } private void startAnalytics(DataFrameAnalyticsTask task, DataFrameAnalyticsConfig config, boolean isTaskRestarting) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java new file mode 100644 index 0000000000000..f007831f7cfa4 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/MappingsMerger.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.dataframe; + +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +/** + * Merges mappings in a best effort and naive manner. + * The merge will fail if there is any conflict, i.e. the mappings of a field are not exactly the same. 
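+ * For example, a field mapped as {@code keyword} in one source index and as {@code text} in another
+ * cannot be merged and results in a bad request error, while fields that are mapped identically
+ * across the source indices are merged into a single entry.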
+ */
+public final class MappingsMerger {
+
+    private MappingsMerger() {}
+
+    public static void mergeMappings(Client client, Map<String, String> headers, String[] index,
+                                     ActionListener<ImmutableOpenMap<String, MappingMetaData>> listener) {
+        ActionListener<GetMappingsResponse> mappingsListener = ActionListener.wrap(
+            getMappingsResponse -> listener.onResponse(MappingsMerger.mergeMappings(getMappingsResponse)),
+            listener::onFailure
+        );
+
+        GetMappingsRequest getMappingsRequest = new GetMappingsRequest();
+        getMappingsRequest.indices(index);
+        ClientHelper.executeWithHeadersAsync(headers, ML_ORIGIN, client, GetMappingsAction.INSTANCE, getMappingsRequest, mappingsListener);
+    }
+
+    static ImmutableOpenMap<String, MappingMetaData> mergeMappings(GetMappingsResponse getMappingsResponse) {
+        ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> indexToMappings = getMappingsResponse.getMappings();
+
+        String type = null;
+        Map<String, Object> mergedMappings = new HashMap<>();
+
+        Iterator<ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>>> iterator = indexToMappings.iterator();
+        while (iterator.hasNext()) {
+            ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings = iterator.next();
+            Iterator<ObjectObjectCursor<String, MappingMetaData>> typeIterator = indexMappings.value.iterator();
+            while (typeIterator.hasNext()) {
+                ObjectObjectCursor<String, MappingMetaData> typeMapping = typeIterator.next();
+                if (type == null) {
+                    type = typeMapping.key;
+                } else {
+                    if (type.equals(typeMapping.key) == false) {
+                        throw ExceptionsHelper.badRequestException("source indices contain mappings for different types: [{}, {}]",
+                            type, typeMapping.key);
+                    }
+                }
+                Map<String, Object> currentMappings = typeMapping.value.getSourceAsMap();
+                if (currentMappings.containsKey("properties")) {
+
+                    @SuppressWarnings("unchecked")
+                    Map<String, Object> fieldMappings = (Map<String, Object>) currentMappings.get("properties");
+
+                    for (Map.Entry<String, Object> fieldMapping : fieldMappings.entrySet()) {
+                        if (mergedMappings.containsKey(fieldMapping.getKey())) {
+                            if (mergedMappings.get(fieldMapping.getKey()).equals(fieldMapping.getValue()) == false) {
+                                throw ExceptionsHelper.badRequestException("cannot merge mappings because of differences for field [{}]",
+                                    fieldMapping.getKey());
+                            }
+                        } else {
+                            mergedMappings.put(fieldMapping.getKey(), fieldMapping.getValue());
+                        }
+                    }
+                }
+            }
+        }
+
+        MappingMetaData mappingMetaData = createMappingMetaData(type, mergedMappings);
+        ImmutableOpenMap.Builder<String, MappingMetaData> result = ImmutableOpenMap.builder();
+        result.put(type, mappingMetaData);
+        return result.build();
+    }
+
+    private static MappingMetaData createMappingMetaData(String type, Map<String, Object> mappings) {
+        try {
+            return new MappingMetaData(type, Collections.singletonMap("properties", mappings));
+        } catch (IOException e) {
+            throw ExceptionsHelper.serverError("Failed to parse mappings: " + mappings);
+        }
+    }
+}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidator.java
index f607387e31774..01803dc4359d5 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidator.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidator.java
@@ -29,10 +29,13 @@ public SourceDestValidator(ClusterState clusterState, IndexNameExpressionResolve
    }

    public void check(DataFrameAnalyticsConfig config) {
-       String sourceIndex = config.getSource().getIndex();
+       String[] sourceIndex = config.getSource().getIndex();
        String destIndex = config.getDest().getIndex();

-       String[] sourceExpressions = Strings.tokenizeToStringArray(sourceIndex, ",");
+       String[] sourceExpressions = Arrays.stream(sourceIndex)
+           .map(index -> Strings.tokenizeToStringArray(index, ","))
+           .flatMap(Arrays::stream)
+           .toArray(String[]::new);

        for
(String sourceExpression : sourceExpressions) { if (Regex.simpleMatch(sourceExpression, destIndex)) { @@ -45,7 +48,7 @@ public void check(DataFrameAnalyticsConfig config) { IndicesOptions.lenientExpandOpen(), sourceExpressions))); if (concreteSourceIndexNames.isEmpty()) { - throw ExceptionsHelper.badRequestException("No index matches source index [{}]", sourceIndex); + throw ExceptionsHelper.badRequestException("No index matches source index {}", Arrays.toString(sourceIndex)); } final String[] concreteDestIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, @@ -59,7 +62,7 @@ public void check(DataFrameAnalyticsConfig config) { if (concreteDestIndexNames.length == 1 && concreteSourceIndexNames.contains(concreteDestIndexNames[0])) { // In case the dest index is an alias, we need to check the concrete index is not matched by source throw ExceptionsHelper.badRequestException("Destination index [{}], which is an alias for [{}], " + - "must not be included in source index [{}]", destIndex, concreteDestIndexNames[0], sourceIndex); + "must not be included in source index {}", destIndex, concreteDestIndexNames[0], Arrays.toString(sourceIndex)); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java index baf77c420c5cb..cacf00ad9e9bc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; @@ -73,7 +74,7 @@ public static void create(Client client, DataFrameAnalyticsConfig config, boolean isTaskRestarting, ActionListener listener) { - validateIndexAndExtractFields(client, config.getDest().getIndex(), config, isTaskRestarting, + validateIndexAndExtractFields(client, new String[] {config.getDest().getIndex()}, config, isTaskRestarting, ActionListener.wrap(extractedFields -> listener.onResponse(new DataFrameDataExtractorFactory( client, config.getId(), config.getDest().getIndex(), extractedFields, config.getHeaders())), listener::onFailure @@ -100,7 +101,7 @@ public static void validateConfigAndSourceIndex(Client client, } private static void validateIndexAndExtractFields(Client client, - String index, + String[] index, DataFrameAnalyticsConfig config, boolean isTaskRestarting, ActionListener listener) { @@ -120,6 +121,7 @@ private static void validateIndexAndExtractFields(Client client, FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest(); fieldCapabilitiesRequest.indices(index); + fieldCapabilitiesRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); fieldCapabilitiesRequest.fields("*"); ClientHelper.executeWithHeaders(config.getHeaders(), ClientHelper.ML_ORIGIN, client, () -> { client.execute(FieldCapabilitiesAction.INSTANCE, fieldCapabilitiesRequest, fieldCapabilitiesHandler); @@ -134,7 +136,7 @@ private static void 
validateIndexAndExtractFields(Client client, getDocValueFieldsLimit(client, index, docValueFieldsLimitListener); } - private static void getDocValueFieldsLimit(Client client, String index, ActionListener docValueFieldsLimitListener) { + private static void getDocValueFieldsLimit(Client client, String[] index, ActionListener docValueFieldsLimitListener) { ActionListener settingsListener = ActionListener.wrap(getSettingsResponse -> { Integer minDocValueFieldsLimit = Integer.MAX_VALUE; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java index b36fc6f182a06..d58eaebe353e9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java @@ -55,13 +55,13 @@ public class ExtractedFieldsDetector { COMPATIBLE_FIELD_TYPES = Collections.unmodifiableSet(compatibleTypes); } - private final String index; + private final String[] index; private final DataFrameAnalyticsConfig config; private final boolean isTaskRestarting; private final int docValueFieldsLimit; private final FieldCapabilitiesResponse fieldCapabilitiesResponse; - ExtractedFieldsDetector(String index, DataFrameAnalyticsConfig config, boolean isTaskRestarting, int docValueFieldsLimit, + ExtractedFieldsDetector(String[] index, DataFrameAnalyticsConfig config, boolean isTaskRestarting, int docValueFieldsLimit, FieldCapabilitiesResponse fieldCapabilitiesResponse) { this.index = Objects.requireNonNull(index); this.config = Objects.requireNonNull(config); @@ -74,7 +74,7 @@ public ExtractedFields detect() { Set fields = new HashSet<>(fieldCapabilitiesResponse.get().keySet()); fields.removeAll(IGNORE_FIELDS); - checkResultsFieldIsNotPresent(fields, index); + checkResultsFieldIsNotPresent(); // Ignore fields under the results object fields.removeIf(field -> field.startsWith(config.getDest().getResultsField() + ".")); @@ -87,7 +87,7 @@ public ExtractedFields detect() { ExtractedFields extractedFields = ExtractedFields.build(sortedFields, Collections.emptySet(), fieldCapabilitiesResponse) .filterFields(ExtractedField.ExtractionMethod.DOC_VALUE); if (extractedFields.getAllFields().isEmpty()) { - throw ExceptionsHelper.badRequestException("No compatible fields could be detected in index [{}]", index); + throw ExceptionsHelper.badRequestException("No compatible fields could be detected in index {}", Arrays.toString(index)); } if (extractedFields.getDocValueFields().size() > docValueFieldsLimit) { extractedFields = fetchFromSourceIfSupported(extractedFields); @@ -100,11 +100,16 @@ public ExtractedFields detect() { return extractedFields; } - private void checkResultsFieldIsNotPresent(Set fields, String index) { + private void checkResultsFieldIsNotPresent() { // If the task is restarting we do not mind the index containing the results field, we will overwrite all docs - if (isTaskRestarting == false && fields.contains(config.getDest().getResultsField())) { - throw ExceptionsHelper.badRequestException("Index [{}] already has a field that matches the {}.{} [{}];" + - " please set a different {}", index, DataFrameAnalyticsConfig.DEST.getPreferredName(), + if (isTaskRestarting) { + return; + } + + Map indexToFieldCaps = fieldCapabilitiesResponse.getField(config.getDest().getResultsField()); + if (indexToFieldCaps != null 
&& indexToFieldCaps.isEmpty() == false) { + throw ExceptionsHelper.badRequestException("A field that matches the {}.{} [{}] already exists;" + + " please set a different {}", DataFrameAnalyticsConfig.DEST.getPreferredName(), DataFrameAnalyticsDest.RESULTS_FIELD.getPreferredName(), config.getDest().getResultsField(), DataFrameAnalyticsDest.RESULTS_FIELD.getPreferredName()); } @@ -121,7 +126,7 @@ private void removeFieldsWithIncompatibleTypes(Set fields) { } } - private void includeAndExcludeFields(Set fields, String index) { + private void includeAndExcludeFields(Set fields, String[] index) { FetchSourceContext analyzedFields = config.getAnalyzedFields(); if (analyzedFields == null) { return; @@ -136,12 +141,14 @@ private void includeAndExcludeFields(Set fields, String index) { // If the inclusion set does not match anything, that means the user's desired fields cannot be found in // the collection of supported field types. We should let the user know. Set includedSet = NameResolver.newUnaliased(fields, - (ex) -> new ResourceNotFoundException(Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER, index, ex))) + (ex) -> new ResourceNotFoundException( + Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER, ex))) .expand(includes, false); // If the exclusion set does not match anything, that means the fields are already not present // no need to raise if nothing matched Set excludedSet = NameResolver.newUnaliased(fields, - (ex) -> new ResourceNotFoundException(Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER, index, ex))) + (ex) -> new ResourceNotFoundException( + Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_BAD_FIELD_FILTER, ex))) .expand(excludes, true); fields.retainAll(includedSet); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java index f341622562a2e..bd6bdb3552cce 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java @@ -10,17 +10,21 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; @@ -33,26 +37,23 @@ import java.time.Clock; import java.time.Instant; import java.time.ZoneId; +import java.util.Arrays; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class DataFrameAnalyticsIndexTests extends ESTestCase { - private static final String CLUSTER_NAME = "some-cluster-name"; - private static final String ANALYTICS_ID = "some-analytics-id"; - private static final String SOURCE_INDEX = "source-index"; + private static final String[] SOURCE_INDEX = new String[] {"source-index"}; private static final String DEST_INDEX = "dest-index"; private static final DataFrameAnalyticsConfig ANALYTICS_CONFIG = new DataFrameAnalyticsConfig.Builder(ANALYTICS_ID) @@ -70,6 +71,8 @@ public class DataFrameAnalyticsIndexTests extends ESTestCase { public void testCreateDestinationIndex() throws IOException { when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + + ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); doAnswer( invocationOnMock -> { @SuppressWarnings("unchecked") @@ -77,58 +80,96 @@ public void testCreateDestinationIndex() throws IOException { listener.onResponse(null); return null; }) - .when(client).execute(any(), any(), any()); - - ClusterState clusterState = - ClusterState.builder(new ClusterName(CLUSTER_NAME)) - .metaData(MetaData.builder() - .put(IndexMetaData.builder(SOURCE_INDEX) - .settings(Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) - .putMapping(new MappingMetaData("_doc", Map.of("properties", Map.of()))))) - .build(); + .when(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + + Settings index1Settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + + Settings index2Settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + + ArgumentCaptor getSettingsRequestCaptor = ArgumentCaptor.forClass(GetSettingsRequest.class); + ArgumentCaptor getMappingsRequestCaptor = ArgumentCaptor.forClass(GetMappingsRequest.class); + + ImmutableOpenMap.Builder indexToSettings = ImmutableOpenMap.builder(); + indexToSettings.put("index_1", index1Settings); + indexToSettings.put("index_2", index2Settings); + + 
GetSettingsResponse getSettingsResponse = new GetSettingsResponse(indexToSettings.build(), ImmutableOpenMap.of()); + + doAnswer( + invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(getSettingsResponse); + return null; + } + ).when(client).execute(eq(GetSettingsAction.INSTANCE), getSettingsRequestCaptor.capture(), any()); + + Map index1Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings")); + MappingMetaData index1MappingMetaData = new MappingMetaData("_doc", index1Mappings); + + Map index2Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings")); + MappingMetaData index2MappingMetaData = new MappingMetaData("_doc", index2Mappings); + + ImmutableOpenMap.Builder index1MappingsMap = ImmutableOpenMap.builder(); + index1MappingsMap.put("_doc", index1MappingMetaData); + ImmutableOpenMap.Builder index2MappingsMap = ImmutableOpenMap.builder(); + index2MappingsMap.put("_doc", index2MappingMetaData); + + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + mappings.put("index_1", index1MappingsMap.build()); + mappings.put("index_2", index2MappingsMap.build()); + + GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); + + doAnswer( + invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(getMappingsResponse); + return null; + } + ).when(client).execute(eq(GetMappingsAction.INSTANCE), getMappingsRequestCaptor.capture(), any()); + DataFrameAnalyticsIndex.createDestinationIndex( client, clock, - clusterState, ANALYTICS_CONFIG, ActionListener.wrap( response -> {}, e -> fail(e.getMessage()))); - ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); - verify(client, atLeastOnce()).threadPool(); - verify(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); - verifyNoMoreInteractions(client); + GetSettingsRequest capturedGetSettingsRequest = getSettingsRequestCaptor.getValue(); + assertThat(capturedGetSettingsRequest.indices(), equalTo(SOURCE_INDEX)); + assertThat(capturedGetSettingsRequest.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen())); + assertThat(Arrays.asList(capturedGetSettingsRequest.names()), contains("index.number_of_shards", "index.number_of_replicas")); + + assertThat(getMappingsRequestCaptor.getValue().indices(), equalTo(SOURCE_INDEX)); CreateIndexRequest createIndexRequest = createIndexRequestCaptor.getValue(); + + assertThat(createIndexRequest.settings().keySet(), + containsInAnyOrder("index.number_of_shards", "index.number_of_replicas", "index.sort.field", "index.sort.order")); + assertThat(createIndexRequest.settings().getAsInt("index.number_of_shards", -1), equalTo(5)); + assertThat(createIndexRequest.settings().getAsInt("index.number_of_replicas", -1), equalTo(1)); + assertThat(createIndexRequest.settings().get("index.sort.field"), equalTo("_id_copy")); + assertThat(createIndexRequest.settings().get("index.sort.order"), equalTo("asc")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, createIndexRequest.mappings().get("_doc"))) { Map map = parser.map(); assertThat(extractValue("_doc.properties._id_copy.type", map), equalTo("keyword")); + assertThat(extractValue("_doc.properties.field_1", map), equalTo("field_1_mappings")); + 
assertThat(extractValue("_doc.properties.field_2", map), equalTo("field_2_mappings")); assertThat(extractValue("_doc._meta.analytics", map), equalTo(ANALYTICS_ID)); assertThat(extractValue("_doc._meta.creation_date_in_millis", map), equalTo(CURRENT_TIME_MILLIS)); assertThat(extractValue("_doc._meta.created_by", map), equalTo(CREATED_BY)); } } - - public void testCreateDestinationIndex_IndexNotFound() { - ClusterState clusterState = - ClusterState.builder(new ClusterName(CLUSTER_NAME)) - .metaData(MetaData.builder()) - .build(); - DataFrameAnalyticsIndex.createDestinationIndex( - client, - clock, - clusterState, - ANALYTICS_CONFIG, - ActionListener.wrap( - response -> fail("IndexNotFoundException should be thrown"), - e -> { - assertThat(e, instanceOf(IndexNotFoundException.class)); - IndexNotFoundException infe = (IndexNotFoundException) e; - assertThat(infe.getIndex().getName(), equalTo(SOURCE_INDEX)); - })); - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/MappingsMergerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/MappingsMergerTests.java new file mode 100644 index 0000000000000..670f6e559bd4f --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/MappingsMergerTests.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.dataframe; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class MappingsMergerTests extends ESTestCase { + + public void testMergeMappings_GivenIndicesWithIdenticalMappings() throws IOException { + Map index1Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings")); + MappingMetaData index1MappingMetaData = new MappingMetaData("_doc", index1Mappings); + + Map index2Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings")); + MappingMetaData index2MappingMetaData = new MappingMetaData("_doc", index2Mappings); + + ImmutableOpenMap.Builder index1MappingsMap = ImmutableOpenMap.builder(); + index1MappingsMap.put("_doc", index1MappingMetaData); + ImmutableOpenMap.Builder index2MappingsMap = ImmutableOpenMap.builder(); + index2MappingsMap.put("_doc", index2MappingMetaData); + + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + mappings.put("index_1", index1MappingsMap.build()); + mappings.put("index_2", index2MappingsMap.build()); + + GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); + + ImmutableOpenMap mergedMappings = MappingsMerger.mergeMappings(getMappingsResponse); + + assertThat(mergedMappings.size(), equalTo(1)); + assertThat(mergedMappings.containsKey("_doc"), is(true)); + 
assertThat(mergedMappings.valuesIt().next().getSourceAsMap(), equalTo(index1Mappings)); + } + + public void testMergeMappings_GivenIndicesWithDifferentTypes() throws IOException { + Map index1Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings")); + MappingMetaData index1MappingMetaData = new MappingMetaData("_doc", index1Mappings); + + Map index2Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings")); + MappingMetaData index2MappingMetaData = new MappingMetaData("_doc", index2Mappings); + + ImmutableOpenMap.Builder index1MappingsMap = ImmutableOpenMap.builder(); + index1MappingsMap.put("type_1", index1MappingMetaData); + ImmutableOpenMap.Builder index2MappingsMap = ImmutableOpenMap.builder(); + index2MappingsMap.put("type_2", index2MappingMetaData); + + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + mappings.put("index_1", index1MappingsMap.build()); + mappings.put("index_2", index2MappingsMap.build()); + + GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); + + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> MappingsMerger.mergeMappings(getMappingsResponse)); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat(e.getMessage(), containsString("source indices contain mappings for different types:")); + assertThat(e.getMessage(), containsString("type_1")); + assertThat(e.getMessage(), containsString("type_2")); + } + + public void testMergeMappings_GivenFieldWithDifferentMapping() throws IOException { + Map index1Mappings = Map.of("properties", Map.of("field_1", "field_1_mappings")); + MappingMetaData index1MappingMetaData = new MappingMetaData("_doc", index1Mappings); + + Map index2Mappings = Map.of("properties", Map.of("field_1", "different_field_1_mappings")); + MappingMetaData index2MappingMetaData = new MappingMetaData("_doc", index2Mappings); + + ImmutableOpenMap.Builder index1MappingsMap = ImmutableOpenMap.builder(); + index1MappingsMap.put("_doc", index1MappingMetaData); + ImmutableOpenMap.Builder index2MappingsMap = ImmutableOpenMap.builder(); + index2MappingsMap.put("_doc", index2MappingMetaData); + + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + mappings.put("index_1", index1MappingsMap.build()); + mappings.put("index_2", index2MappingsMap.build()); + + GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); + + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> MappingsMerger.mergeMappings(getMappingsResponse)); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat(e.getMessage(), equalTo("cannot merge mappings because of differences for field [field_1]")); + } + + public void testMergeMappings_GivenIndicesWithDifferentMappingsButNoConflicts() throws IOException { + Map index1Mappings = Map.of("properties", + Map.of("field_1", "field_1_mappings", "field_2", "field_2_mappings")); + MappingMetaData index1MappingMetaData = new MappingMetaData("_doc", index1Mappings); + + Map index2Mappings = Map.of("properties", + Map.of("field_1", "field_1_mappings", "field_3", "field_3_mappings")); + MappingMetaData index2MappingMetaData = new MappingMetaData("_doc", index2Mappings); + + ImmutableOpenMap.Builder index1MappingsMap = ImmutableOpenMap.builder(); + index1MappingsMap.put("_doc", index1MappingMetaData); + ImmutableOpenMap.Builder index2MappingsMap = ImmutableOpenMap.builder(); + index2MappingsMap.put("_doc", index2MappingMetaData); + + 
ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + mappings.put("index_1", index1MappingsMap.build()); + mappings.put("index_2", index2MappingsMap.build()); + + GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); + + ImmutableOpenMap mergedMappings = MappingsMerger.mergeMappings(getMappingsResponse); + + assertThat(mergedMappings.size(), equalTo(1)); + assertThat(mergedMappings.containsKey("_doc"), is(true)); + Map mappingsAsMap = mergedMappings.valuesIt().next().getSourceAsMap(); + assertThat(mappingsAsMap.size(), equalTo(1)); + assertThat(mappingsAsMap.containsKey("properties"), is(true)); + + @SuppressWarnings("unchecked") + Map fieldMappings = (Map) mappingsAsMap.get("properties"); + + assertThat(fieldMappings.size(), equalTo(3)); + assertThat(fieldMappings.keySet(), containsInAnyOrder("field_1", "field_2", "field_3")); + assertThat(fieldMappings.get("field_1"), equalTo("field_1_mappings")); + assertThat(fieldMappings.get("field_2"), equalTo("field_2_mappings")); + assertThat(fieldMappings.get("field_3"), equalTo("field_3_mappings")); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidatorTests.java index fb91673b7a509..d48d079314aa6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/SourceDestValidatorTests.java @@ -64,7 +64,7 @@ public class SourceDestValidatorTests extends ESTestCase { public void testCheck_GivenSimpleSourceIndexAndValidDestIndex() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("source-1", null)) + .setSource(createSource("source-1")) .setDest(new DataFrameAnalyticsDest("dest", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -75,7 +75,7 @@ public void testCheck_GivenSimpleSourceIndexAndValidDestIndex() { public void testCheck_GivenMissingConcreteSourceIndex() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("missing", null)) + .setSource(createSource("missing")) .setDest(new DataFrameAnalyticsDest("dest", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -89,7 +89,7 @@ public void testCheck_GivenMissingConcreteSourceIndex() { public void testCheck_GivenMissingWildcardSourceIndex() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("missing*", null)) + .setSource(createSource("missing*")) .setDest(new DataFrameAnalyticsDest("dest", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -103,7 +103,7 @@ public void testCheck_GivenMissingWildcardSourceIndex() { public void testCheck_GivenDestIndexSameAsSourceIndex() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("source-1", null)) + .setSource(createSource("source-1")) .setDest(new DataFrameAnalyticsDest("source-1", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -117,7 +117,7 @@ public void testCheck_GivenDestIndexSameAsSourceIndex() { public void testCheck_GivenDestIndexMatchesSourceIndex() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("source-*", null)) + .setSource(createSource("source-*")) 
.setDest(new DataFrameAnalyticsDest(SOURCE_2, null)) .setAnalysis(new OutlierDetection()) .build(); @@ -131,7 +131,7 @@ public void testCheck_GivenDestIndexMatchesSourceIndex() { public void testCheck_GivenDestIndexMatchesOneOfSourceIndices() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("source-1,source-*", null)) + .setSource(createSource("source-1,source-*")) .setDest(new DataFrameAnalyticsDest(SOURCE_2, null)) .setAnalysis(new OutlierDetection()) .build(); @@ -145,7 +145,7 @@ public void testCheck_GivenDestIndexMatchesOneOfSourceIndices() { public void testCheck_GivenDestIndexIsAliasThatMatchesMultipleIndices() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource(SOURCE_1, null)) + .setSource(createSource(SOURCE_1)) .setDest(new DataFrameAnalyticsDest("dest-alias", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -160,7 +160,7 @@ public void testCheck_GivenDestIndexIsAliasThatMatchesMultipleIndices() { public void testCheck_GivenDestIndexIsAliasThatIsIncludedInSource() { DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder("test") - .setSource(new DataFrameAnalyticsSource("source-1", null)) + .setSource(createSource("source-1")) .setDest(new DataFrameAnalyticsDest("source-1-alias", null)) .setAnalysis(new OutlierDetection()) .build(); @@ -173,4 +173,8 @@ public void testCheck_GivenDestIndexIsAliasThatIsIncludedInSource() { equalTo("Destination index [source-1-alias], which is an alias for [source-1], " + "must not be included in source index [source-1]")); } + + private static DataFrameAnalyticsSource createSource(String... index) { + return new DataFrameAnalyticsSource(index, null); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java index c035c44f117f4..1345a1fe1287b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java @@ -33,7 +33,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { - private static final String SOURCE_INDEX = "source_index"; + private static final String[] SOURCE_INDEX = new String[] { "source_index" }; private static final String DEST_INDEX = "dest_index"; private static final String RESULTS_FIELD = "ml"; @@ -154,7 +154,7 @@ public void testDetectedExtractedFields_GivenIncludeWithMissingField() { SOURCE_INDEX, buildAnalyticsConfig(desiredFields), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); - assertThat(e.getMessage(), equalTo("No compatible fields could be detected in index [source_index] with name [your_field1]")); + assertThat(e.getMessage(), equalTo("No field [your_field1] could be detected")); } public void testDetectedExtractedFields_GivenExcludeAllValidFields() { @@ -202,7 +202,7 @@ public void testDetectedExtractedFields_GivenIndexContainsResultsField() { SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); - assertThat(e.getMessage(), equalTo("Index 
[source_index] already has a field that matches the dest.results_field [ml]; " + + assertThat(e.getMessage(), equalTo("A field that matches the dest.results_field [ml] already exists; " + "please set a different results_field")); } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml index 01afb7714f395..b5860a8d55e51 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml @@ -50,7 +50,7 @@ setup: "analyzed_fields": [ "obj1.*", "obj2.*" ] } - match: { id: "simple-outlier-detection-with-query" } - - match: { source.index: "index-source" } + - match: { source.index: ["index-source"] } - match: { source.query: {"term" : { "user" : "Kimchy"} } } - match: { dest.index: "index-dest" } - match: { analysis: {"outlier_detection":{}} } @@ -145,7 +145,7 @@ setup: "analysis": {"outlier_detection":{}} } - match: { id: "simple-outlier-detection" } - - match: { source.index: "index-source" } + - match: { source.index: ["index-source"] } - match: { source.query: {"match_all" : {} } } - match: { dest.index: "index-dest" } - match: { analysis: {"outlier_detection":{}} } @@ -175,7 +175,7 @@ setup: } } - match: { id: "custom-outlier-detection" } - - match: { source.index: "index-source" } + - match: { source.index: ["index-source"] } - match: { source.query: {"match_all" : {} } } - match: { dest.index: "index-dest" } - match: { analysis.outlier_detection.n_neighbors: 5 } @@ -427,16 +427,34 @@ setup: } --- -"Test put config given source with empty index": +"Test put config given source with empty index array": - do: - catch: /\[index\] must be non-empty/ + catch: /source\.index must specify at least one index/ ml.put_data_frame_analytics: id: "simple-outlier-detection" body: > { "source": { - "index": "" + "index": [] + }, + "dest": { + "index": "index-dest" + }, + "analysis": {"outlier_detection":{}} + } + +--- +"Test put config given source with empty string in index array": + + - do: + catch: /source\.index must contain non-null and non-empty strings/ + ml.put_data_frame_analytics: + id: "simple-outlier-detection" + body: > + { + "source": { + "index": [""] }, "dest": { "index": "index-dest" @@ -889,7 +907,7 @@ setup: "analyzed_fields": [ "obj1.*", "obj2.*" ] } - match: { id: "simple-outlier-detection-with-query" } - - match: { source.index: "index-source" } + - match: { source.index: ["index-source"] } - match: { source.query: {"term" : { "user" : "Kimchy"} } } - match: { dest.index: "index-dest" } - match: { analysis: {"outlier_detection":{}} } From 04359a60b4972c3a87a1144e7a105058a75688b0 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Fri, 28 Jun 2019 11:29:37 +0300 Subject: [PATCH 052/140] [ML] Rename outlier score setting to feature_influence_threshold (#43705) Renames outlier score setting `minimum_score_to_write_feature_influence` to `feature_influence_threshold`. 
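To illustrate the rename, a minimal request sketch using the new parameter name (the job id here is made up, the endpoint is assumed to be the standard `_ml/data_frame/analytics` API, and the `outlier_detection` values mirror the yml test updated in this patch):

[source,js]
----
PUT _ml/data_frame/analytics/example-job
{
  "source": { "index": "index-source" },
  "dest": { "index": "index-dest" },
  "analysis": {
    "outlier_detection": {
      "n_neighbors": 5,
      "method": "lof",
      "feature_influence_threshold": 0.0
    }
  }
}
----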
--- .../client/ml/dataframe/OutlierDetection.java | 33 +++++++++--------- .../ml/dataframe/OutlierDetectionTests.java | 8 ++--- .../dataframe/analyses/OutlierDetection.java | 34 +++++++++---------- .../persistence/ElasticsearchMappings.java | 2 +- .../ml/job/results/ReservedFieldNames.java | 2 +- .../analyses/OutlierDetectionTests.java | 2 +- .../test/ml/data_frame_analytics_crud.yml | 4 +-- 7 files changed, 41 insertions(+), 44 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java index 946c01ac5c835..fe5094fb7190a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java @@ -47,8 +47,7 @@ public static Builder builder() { public static final ParseField NAME = new ParseField("outlier_detection"); static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); static final ParseField METHOD = new ParseField("method"); - public static final ParseField MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE = - new ParseField("minimum_score_to_write_feature_influence"); + public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); private static ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), true, Builder::new); @@ -60,23 +59,23 @@ public static Builder builder() { } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); }, METHOD, ObjectParser.ValueType.STRING); - PARSER.declareDouble(Builder::setMinScoreToWriteFeatureInfluence, MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE); + PARSER.declareDouble(Builder::setFeatureInfluenceThreshold, FEATURE_INFLUENCE_THRESHOLD); } private final Integer nNeighbors; private final Method method; - private final Double minScoreToWriteFeatureInfluence; + private final Double featureInfluenceThreshold; /** * Constructs the outlier detection configuration * @param nNeighbors The number of neighbors. Leave unspecified for dynamic detection. * @param method The method. Leave unspecified for a dynamic mixture of methods. - * @param minScoreToWriteFeatureInfluence The min outlier score required to calculate feature influence. Defaults to 0.1. + * @param featureInfluenceThreshold The min outlier score required to calculate feature influence. Defaults to 0.1. 
*/ - private OutlierDetection(@Nullable Integer nNeighbors, @Nullable Method method, @Nullable Double minScoreToWriteFeatureInfluence) { + private OutlierDetection(@Nullable Integer nNeighbors, @Nullable Method method, @Nullable Double featureInfluenceThreshold) { this.nNeighbors = nNeighbors; this.method = method; - this.minScoreToWriteFeatureInfluence = minScoreToWriteFeatureInfluence; + this.featureInfluenceThreshold = featureInfluenceThreshold; } @Override @@ -92,8 +91,8 @@ public Method getMethod() { return method; } - public Double getMinScoreToWriteFeatureInfluence() { - return minScoreToWriteFeatureInfluence; + public Double getFeatureInfluenceThreshold() { + return featureInfluenceThreshold; } @Override @@ -105,8 +104,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (method != null) { builder.field(METHOD.getPreferredName(), method); } - if (minScoreToWriteFeatureInfluence != null) { - builder.field(MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName(), minScoreToWriteFeatureInfluence); + if (featureInfluenceThreshold != null) { + builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); } builder.endObject(); return builder; @@ -120,12 +119,12 @@ public boolean equals(Object o) { OutlierDetection other = (OutlierDetection) o; return Objects.equals(nNeighbors, other.nNeighbors) && Objects.equals(method, other.method) - && Objects.equals(minScoreToWriteFeatureInfluence, other.minScoreToWriteFeatureInfluence); + && Objects.equals(featureInfluenceThreshold, other.featureInfluenceThreshold); } @Override public int hashCode() { - return Objects.hash(nNeighbors, method, minScoreToWriteFeatureInfluence); + return Objects.hash(nNeighbors, method, featureInfluenceThreshold); } @Override @@ -150,7 +149,7 @@ public static class Builder { private Integer nNeighbors; private Method method; - private Double minScoreToWriteFeatureInfluence; + private Double featureInfluenceThreshold; private Builder() {} @@ -164,13 +163,13 @@ public Builder setMethod(Method method) { return this; } - public Builder setMinScoreToWriteFeatureInfluence(Double minScoreToWriteFeatureInfluence) { - this.minScoreToWriteFeatureInfluence = minScoreToWriteFeatureInfluence; + public Builder setFeatureInfluenceThreshold(Double featureInfluenceThreshold) { + this.featureInfluenceThreshold = featureInfluenceThreshold; return this; } public OutlierDetection build() { - return new OutlierDetection(nNeighbors, method, minScoreToWriteFeatureInfluence); + return new OutlierDetection(nNeighbors, method, featureInfluenceThreshold); } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/OutlierDetectionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/OutlierDetectionTests.java index de110d92fdee1..7307999a2bf71 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/OutlierDetectionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/OutlierDetectionTests.java @@ -33,7 +33,7 @@ public static OutlierDetection randomOutlierDetection() { return OutlierDetection.builder() .setNNeighbors(randomBoolean() ? null : randomIntBetween(1, 20)) .setMethod(randomBoolean() ? null : randomFrom(OutlierDetection.Method.values())) - .setMinScoreToWriteFeatureInfluence(randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, true)) + .setFeatureInfluenceThreshold(randomBoolean() ? 
null : randomDoubleBetween(0.0, 1.0, true)) .build(); } @@ -56,7 +56,7 @@ public void testGetParams_GivenDefaults() { OutlierDetection outlierDetection = OutlierDetection.createDefault(); assertNull(outlierDetection.getNNeighbors()); assertNull(outlierDetection.getMethod()); - assertNull(outlierDetection.getMinScoreToWriteFeatureInfluence()); + assertNull(outlierDetection.getFeatureInfluenceThreshold()); } public void testGetParams_GivenExplicitValues() { @@ -64,10 +64,10 @@ public void testGetParams_GivenExplicitValues() { OutlierDetection.builder() .setNNeighbors(42) .setMethod(OutlierDetection.Method.LDOF) - .setMinScoreToWriteFeatureInfluence(0.5) + .setFeatureInfluenceThreshold(0.5) .build(); assertThat(outlierDetection.getNNeighbors(), equalTo(42)); assertThat(outlierDetection.getMethod(), equalTo(OutlierDetection.Method.LDOF)); - assertThat(outlierDetection.getMinScoreToWriteFeatureInfluence(), closeTo(0.5, 1E-9)); + assertThat(outlierDetection.getFeatureInfluenceThreshold(), closeTo(0.5, 1E-9)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java index 91eb02b7bcdfe..e6891116ad68b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java @@ -27,8 +27,7 @@ public class OutlierDetection implements DataFrameAnalysis { public static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); public static final ParseField METHOD = new ParseField("method"); - public static final ParseField MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE = - new ParseField("minimum_score_to_write_feature_influence"); + public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); private static final ConstructingObjectParser STRICT_PARSER = createParser(false); @@ -43,7 +42,7 @@ private static ConstructingObjectParser createParser(boo } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); }, METHOD, ObjectParser.ValueType.STRING); - parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE); + parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_INFLUENCE_THRESHOLD); return parser; } @@ -53,27 +52,26 @@ public static OutlierDetection fromXContent(XContentParser parser, boolean ignor private final Integer nNeighbors; private final Method method; - private final Double minScoreToWriteFeatureInfluence; + private final Double featureInfluenceThreshold; /** * Constructs the outlier detection configuration * @param nNeighbors The number of neighbors. Leave unspecified for dynamic detection. * @param method The method. Leave unspecified for a dynamic mixture of methods. - * @param minScoreToWriteFeatureInfluence The min outlier score required to calculate feature influence. Defaults to 0.1. + * @param featureInfluenceThreshold The min outlier score required to calculate feature influence. Defaults to 0.1. 
*/ - public OutlierDetection(@Nullable Integer nNeighbors, @Nullable Method method, @Nullable Double minScoreToWriteFeatureInfluence) { + public OutlierDetection(@Nullable Integer nNeighbors, @Nullable Method method, @Nullable Double featureInfluenceThreshold) { if (nNeighbors != null && nNeighbors <= 0) { throw ExceptionsHelper.badRequestException("[{}] must be a positive integer", N_NEIGHBORS.getPreferredName()); } - if (minScoreToWriteFeatureInfluence != null && (minScoreToWriteFeatureInfluence < 0.0 || minScoreToWriteFeatureInfluence > 1.0)) { - throw ExceptionsHelper.badRequestException("[{}] must be in [0, 1]", - MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName()); + if (featureInfluenceThreshold != null && (featureInfluenceThreshold < 0.0 || featureInfluenceThreshold > 1.0)) { + throw ExceptionsHelper.badRequestException("[{}] must be in [0, 1]", FEATURE_INFLUENCE_THRESHOLD.getPreferredName()); } this.nNeighbors = nNeighbors; this.method = method; - this.minScoreToWriteFeatureInfluence = minScoreToWriteFeatureInfluence; + this.featureInfluenceThreshold = featureInfluenceThreshold; } /** @@ -86,7 +84,7 @@ public OutlierDetection() { public OutlierDetection(StreamInput in) throws IOException { nNeighbors = in.readOptionalVInt(); method = in.readBoolean() ? in.readEnum(Method.class) : null; - minScoreToWriteFeatureInfluence = in.readOptionalDouble(); + featureInfluenceThreshold = in.readOptionalDouble(); } @Override @@ -105,7 +103,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } - out.writeOptionalDouble(minScoreToWriteFeatureInfluence); + out.writeOptionalDouble(featureInfluenceThreshold); } @Override @@ -117,8 +115,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (method != null) { builder.field(METHOD.getPreferredName(), method); } - if (minScoreToWriteFeatureInfluence != null) { - builder.field(MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName(), minScoreToWriteFeatureInfluence); + if (featureInfluenceThreshold != null) { + builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); } builder.endObject(); return builder; @@ -131,12 +129,12 @@ public boolean equals(Object o) { OutlierDetection that = (OutlierDetection) o; return Objects.equals(nNeighbors, that.nNeighbors) && Objects.equals(method, that.method) - && Objects.equals(minScoreToWriteFeatureInfluence, that.minScoreToWriteFeatureInfluence); + && Objects.equals(featureInfluenceThreshold, that.featureInfluenceThreshold); } @Override public int hashCode() { - return Objects.hash(nNeighbors, method, minScoreToWriteFeatureInfluence); + return Objects.hash(nNeighbors, method, featureInfluenceThreshold); } @Override @@ -148,8 +146,8 @@ public Map getParams() { if (method != null) { params.put(METHOD.getPreferredName(), method); } - if (minScoreToWriteFeatureInfluence != null) { - params.put(MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName(), minScoreToWriteFeatureInfluence); + if (featureInfluenceThreshold != null) { + params.put(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); } return params; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 75ce2d53315c3..0fc7770758ad3 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -432,7 +432,7 @@ public static void addDataFrameAnalyticsFields(XContentBuilder builder) throws I .startObject(OutlierDetection.METHOD.getPreferredName()) .field(TYPE, KEYWORD) .endObject() - .startObject(OutlierDetection.MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName()) + .startObject(OutlierDetection.FEATURE_INFLUENCE_THRESHOLD.getPreferredName()) .field(TYPE, DOUBLE) .endObject() .endObject() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index eff33a37d9773..2b3497c0aff09 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -286,7 +286,7 @@ public final class ReservedFieldNames { OutlierDetection.NAME.getPreferredName(), OutlierDetection.N_NEIGHBORS.getPreferredName(), OutlierDetection.METHOD.getPreferredName(), - OutlierDetection.MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName(), + OutlierDetection.FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), ElasticsearchMappings.CONFIG_TYPE, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetectionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetectionTests.java index d7a3269597101..db6ed7d34aa7d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetectionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetectionTests.java @@ -53,7 +53,7 @@ public void testGetParams_GivenExplicitValues() { assertThat(params.size(), equalTo(3)); assertThat(params.get(OutlierDetection.N_NEIGHBORS.getPreferredName()), equalTo(42)); assertThat(params.get(OutlierDetection.METHOD.getPreferredName()), equalTo(OutlierDetection.Method.LDOF)); - assertThat((Double) params.get(OutlierDetection.MINIMUM_SCORE_TO_WRITE_FEATURE_INFLUENCE.getPreferredName()), + assertThat((Double) params.get(OutlierDetection.FEATURE_INFLUENCE_THRESHOLD.getPreferredName()), is(closeTo(0.42, 1E-9))); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml index b5860a8d55e51..73b5f62609c3e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml @@ -170,7 +170,7 @@ setup: "outlier_detection":{ "n_neighbors": 5, "method": "lof", - "minimum_score_to_write_feature_influence": 0.0 + "feature_influence_threshold": 0.0 } } } @@ -180,7 +180,7 @@ setup: - match: { dest.index: "index-dest" } - match: { analysis.outlier_detection.n_neighbors: 5 } - match: { analysis.outlier_detection.method: "lof" } - - match: { analysis.outlier_detection.minimum_score_to_write_feature_influence: 0.0 } + - match: { analysis.outlier_detection.feature_influence_threshold: 0.0 } - is_true: create_time - is_true: version From 1a40777cea3e7b0a67f650d283eaa4a5985e46cf 
Mon Sep 17 00:00:00 2001
From: Dimitris Athanasiou
Date: Fri, 28 Jun 2019 12:10:23 +0300
Subject: [PATCH 053/140] [TEST][ML] Fix yml test failure for
 data_frame_analytics_crud

Oddly, this passed CI on the PR that introduced it...
---
 .../rest-api-spec/test/ml/data_frame_analytics_crud.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml
index 73b5f62609c3e..a171e879fa093 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml
@@ -63,7 +63,7 @@ setup:
         id: "simple-outlier-detection-with-query"
   - match: { count: 1 }
   - match: { data_frame_analytics.0.id: "simple-outlier-detection-with-query" }
-  - match: { data_frame_analytics.0.source.index: "index-source" }
+  - match: { data_frame_analytics.0.source.index: ["index-source"] }
   - match: { data_frame_analytics.0.source.query: {"term" : { "user" : "Kimchy"} } }
   - match: { data_frame_analytics.0.dest.index: "index-dest" }
   - match: { data_frame_analytics.0.analysis: {"outlier_detection":{}} }

From 62d13e9468c51a96dea7375178c5e319ef1681fa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?=
Date: Fri, 28 Jun 2019 11:25:51 +0200
Subject: [PATCH 054/140] Remove remnants of StandardHtmlStripAnalyzer (#43485)

StandardHtmlStripAnalyzer has been deprecated in 6.x and cannot be used
for new indices from 7.0 on. This change removes it entirely, and also
removes the remaining references from tests and the deprecation logging
that has still been around during the 7.x versions.
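The removed error message below spells out the replacement: a custom analyzer built from the `standard` tokenizer, the `html_strip` char filter, and the `lowercase` token filter. A minimal sketch of such an analyzer definition (the index and analyzer names are illustrative only, not part of the change):

[source,js]
----
PUT my-index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "html_strip_analyzer": {
          "type": "custom",
          "tokenizer": "standard",
          "char_filter": [ "html_strip" ],
          "filter": [ "lowercase" ]
        }
      }
    }
  }
}
----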
---
 .../analysis/common/CommonAnalysisPlugin.java |  3 -
 .../common/StandardHtmlStripAnalyzer.java     | 58 -------------------
 .../test/analysis-common/20_analyzers.yml     |  9 ---
 .../index/analysis/AnalysisRegistry.java      |  4 --
 4 files changed, 74 deletions(-)
 delete mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java

diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
index ee6ff73ea4351..a655f42a36c26 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
@@ -322,9 +322,6 @@ public Map> getTokenizers() {
     @Override
     public List getPreBuiltAnalyzerProviderFactories() {
         List analyzers = new ArrayList<>();
-        // TODO remove in 8.0
-        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH,
-            () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
         analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH,
             () -> new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true,
                 CharArraySet.EMPTY_SET)));
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java
deleted file mode 100644
index a35a0ea2a4a0b..0000000000000
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.analysis.common;
-
-import org.apache.lucene.analysis.CharArraySet;
-import org.apache.lucene.analysis.LowerCaseFilter;
-import org.apache.lucene.analysis.StopFilter;
-import org.apache.lucene.analysis.StopwordAnalyzerBase;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.en.EnglishAnalyzer;
-import org.apache.lucene.analysis.standard.StandardTokenizer;
-
-public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase {
-
-    /**
-     * @deprecated use {@link StandardHtmlStripAnalyzer#StandardHtmlStripAnalyzer(CharArraySet)} instead
-     */
-    @Deprecated
-    public StandardHtmlStripAnalyzer() {
-        super(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET);
-    }
-    /**
-     * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0
-     */
-    @Deprecated
-    StandardHtmlStripAnalyzer(CharArraySet stopwords) {
-        super(stopwords);
-    }
-
-    @Override
-    protected TokenStreamComponents createComponents(final String fieldName) {
-        final Tokenizer src = new StandardTokenizer();
-        TokenStream tok = new LowerCaseFilter(src);
-        if (!stopwords.isEmpty()) {
-            tok = new StopFilter(tok, stopwords);
-        }
-        return new TokenStreamComponents(src, tok);
-    }
-
-}
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml
index 15de6fe664786..2904cc3e95b58 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml
@@ -67,15 +67,6 @@
     - length: { tokens: 1 }
     - match: { tokens.0.token: a1 b2 c3 d4 }

----
-"standard_html_strip":
-    - do:
-        catch: /\[standard_html_strip\] analyzer is not supported for new indices, use a custom analyzer using \[standard\] tokenizer and \[html_strip\] char_filter, plus \[lowercase\] filter/
-        indices.analyze:
-          body:
-            text:
-            analyzer: standard_html_strip
-
 ---
 "pattern":
     - do:
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
index 51b72680c9316..755266604add5 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
@@ -185,11 +185,7 @@ public Analyzer getAnalyzer(String analyzer) throws IOException {
                     throw new ElasticsearchException("failed to load analyzer for name " + key, ex);
                 }}
             );
-        } else if ("standard_html_strip".equals(analyzer)) {
-            throw new
IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " + - "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); } - return analyzerProvider.get(environment, analyzer).get(); } From b92de2845b50cdcc376cf6c6281ae3fdf78dda3e Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Fri, 28 Jun 2019 12:58:22 +0200 Subject: [PATCH 055/140] Enabled cannot be updated (#43701) Removed the invalid tip that enabled can be updated for existing fields and clarified instead that it cannot. Related to #33566 and #33933 --- docs/reference/mapping/params/enabled.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/mapping/params/enabled.asciidoc b/docs/reference/mapping/params/enabled.asciidoc index 7193c6aa9f6e3..edbfb1f77d71e 100644 --- a/docs/reference/mapping/params/enabled.asciidoc +++ b/docs/reference/mapping/params/enabled.asciidoc @@ -89,8 +89,8 @@ GET my_index/_mapping <3> <2> The document can be retrieved. <3> Checking the mapping reveals that no fields have been added. -TIP: The `enabled` setting can be updated on existing fields -using the <>. +The `enabled` setting for existing fields and the top-level mapping +definition cannot be updated. Note that because Elasticsearch completely skips parsing the field contents, it is possible to add non-object data to a disabled field: From f3317eb82d853623fa205b9e5fc92bcd9296a210 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 28 Jun 2019 15:33:24 +0300 Subject: [PATCH 056/140] Add support for 'flattened object' fields. (#42541) This commit merges the `object-fields` feature branch. The new 'flattened object' field type allows an entire JSON object to be indexed into a field, and provides limited search functionality over the field's contents. 
--- docs/reference/mapping/types.asciidoc | 9 +- .../mapping/types/flattened.asciidoc | 188 ++++++ docs/reference/rest-api/info.asciidoc | 4 + .../fielddata/IndexOrdinalsFieldData.java | 7 + .../GlobalOrdinalsIndexFieldData.java | 11 + .../plain/AbstractIndexOrdinalsFieldData.java | 5 + .../SortedSetDVOrdinalsIndexFieldData.java | 5 + .../index/mapper/ContentPath.java | 4 + .../index/mapper/DynamicKeyFieldMapper.java | 54 ++ .../index/mapper/FieldMapper.java | 2 +- .../index/mapper/FieldTypeLookup.java | 128 +++- .../index/mapper/MapperService.java | 5 +- .../index/query/QueryShardContext.java | 4 +- .../index/search/QueryParserHelper.java | 18 +- .../bucket/terms/TermsAggregatorFactory.java | 9 +- .../aggregations/support/ValuesSource.java | 14 + .../fielddata/IndexFieldDataServiceTests.java | 2 +- .../mapper/FieldNamesFieldTypeTests.java | 2 +- .../index/query/MatchQueryBuilderTests.java | 3 +- .../index/search/MatchPhraseQueryIT.java | 5 +- .../bucket/terms/StringTermsIT.java | 7 +- .../search/lookup/LeafDocLookupTests.java | 21 +- .../search/query/QueryStringIT.java | 1 + .../search/query/SimpleQueryStringIT.java | 1 - .../license/XPackLicenseState.java | 9 + .../xpack/core/XPackClientPlugin.java | 2 + .../elasticsearch/xpack/core/XPackField.java | 2 + .../xpack/core/XPackSettings.java | 5 + .../core/action/XPackInfoFeatureAction.java | 5 +- .../core/action/XPackUsageFeatureAction.java | 5 +- .../flattened/FlattenedFeatureSetUsage.java | 24 + x-pack/plugin/mapper-flattened/build.gradle | 27 + .../FlattenedInfoTransportAction.java | 47 ++ .../flattened/FlattenedMapperPlugin.java | 50 ++ .../FlattenedUsageTransportAction.java | 49 ++ .../mapper/FlatObjectFieldMapper.java | 616 ++++++++++++++++++ .../mapper/FlatObjectFieldParser.java | 167 +++++ .../KeyedFlatObjectAtomicFieldData.java | 251 +++++++ .../mapper/FlatObjectFieldLookupTests.java | 188 ++++++ .../mapper/FlatObjectFieldMapperTests.java | 457 +++++++++++++ .../mapper/FlatObjectFieldParserTests.java | 318 +++++++++ .../mapper/FlatObjectIndexFieldDataTests.java | 106 +++ .../mapper/FlatObjectSearchTests.java | 512 +++++++++++++++ .../KeyedFlatObjectAtomicFieldDataTests.java | 204 ++++++ .../mapper/KeyedFlatObjectFieldTypeTests.java | 155 +++++ .../mapper/RootFlatObjectFieldTypeTests.java | 116 ++++ .../rest-api-spec/test/flattened/10_basic.yml | 111 ++++ 47 files changed, 3891 insertions(+), 44 deletions(-) create mode 100644 docs/reference/mapping/types/flattened.asciidoc create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/DynamicKeyFieldMapper.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/flattened/FlattenedFeatureSetUsage.java create mode 100644 x-pack/plugin/mapper-flattened/build.gradle create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedInfoTransportAction.java create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedMapperPlugin.java create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedUsageTransportAction.java create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParser.java create mode 100644 x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldData.java 
create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlatObjectFieldLookupTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParserTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectIndexFieldDataTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectSearchTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldDataTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java create mode 100644 x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index c0db156dc3a1c..bdfcf1128a061 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -42,8 +42,6 @@ string:: <> and <> <>:: Defines parent/child relation for documents within the same index -<>:: Defines an alias to an existing field. - <>:: Record numeric feature to boost hits at query time. <>:: Record numeric features to boost hits at query time. @@ -54,6 +52,11 @@ string:: <> and <> <>:: A text-like field optimized for queries to implement as-you-type completion +<>:: Defines an alias to an existing field. + +<>:: Allows an entire JSON object to be indexed as a single field. + + [float] === Multi-fields @@ -82,6 +85,8 @@ include::types/date.asciidoc[] include::types/date_nanos.asciidoc[] +include::types/flattened.asciidoc[] + include::types/geo-point.asciidoc[] include::types/geo-shape.asciidoc[] diff --git a/docs/reference/mapping/types/flattened.asciidoc b/docs/reference/mapping/types/flattened.asciidoc new file mode 100644 index 0000000000000..80fd72c3dcc1f --- /dev/null +++ b/docs/reference/mapping/types/flattened.asciidoc @@ -0,0 +1,188 @@ +[role="xpack"] +[testenv="basic"] + +[[flattened]] +=== Flattened datatype + +By default, each subfield in an object is mapped and indexed separately. If +the names or types of the subfields are not known in advance, then they are +<>. + +The `flattened` type provides an alternative approach, where the entire +object is mapped as a single field. Given an object, the `flattened` +mapping will parse out its leaf values and index them into one field as +keywords. The object's contents can then be searched through simple queries +and aggregations. + +This data type can be useful for indexing objects with a large or unknown +number of unique keys. Only one field mapping is created for the whole JSON +object, which can help prevent a <> +from having too many distinct field mappings. + +On the other hand, flattened object fields present a trade-off in terms of +search functionality. Only basic queries are allowed, with no support for +numeric range queries or highlighting. Further information on the limitations +can be found in the <> section. 
+
+NOTE: The `flattened` mapping type should **not** be used for indexing all
+document content, as it treats all values as keywords and does not provide full
+search functionality. The default approach, where each subfield has its own
+entry in the mappings, works well in the majority of cases.
+
+A flattened object field can be created as follows:
+
+[source,js]
+--------------------------------
+PUT bug_reports
+{
+  "mappings": {
+    "properties": {
+      "title": {
+        "type": "text"
+      },
+      "labels": {
+        "type": "flattened"
+      }
+    }
+  }
+}
+
+POST bug_reports/_doc/1
+{
+  "title": "Results are not sorted correctly.",
+  "labels": {
+    "priority": "urgent",
+    "release": ["v1.2.5", "v1.3.0"],
+    "timestamp": {
+      "created": 1541458026,
+      "closed": 1541457010
+    }
+  }
+}
+--------------------------------
+// CONSOLE
+// TESTSETUP
+
+During indexing, tokens are created for each leaf value in the JSON object. The
+values are indexed as string keywords, without analysis or special handling for
+numbers or dates.
+
+Querying the top-level `flattened` field searches all leaf values in the
+object:
+
+[source,js]
+--------------------------------
+POST bug_reports/_search
+{
+  "query": {
+    "term": {"labels": "urgent"}
+  }
+}
+--------------------------------
+// CONSOLE
+
+To query on a specific key in the flattened object, use object dot notation:
+
+[source,js]
+--------------------------------
+POST bug_reports/_search
+{
+  "query": {
+    "term": {"labels.release": "v1.3.0"}
+  }
+}
+--------------------------------
+// CONSOLE
+
+[[supported-operations]]
+==== Supported operations
+
+Because of the similarities in the way values are indexed, `flattened`
+fields share much of the same mapping and search functionality as
+<<keyword,keyword>> fields.
+
+Currently, flattened object fields can be used with the following query types:
+
+- `term`, `terms`, and `terms_set`
+- `prefix`
+- `range`
+- `match` and `multi_match`
+- `query_string` and `simple_query_string`
+- `exists`
+
+When querying, it is not possible to refer to field keys using wildcards, as in
+`{ "term": {"labels.time*": 1541457010}}`. Note that all queries, including
+`range`, treat the values as string keywords. Highlighting is not supported on
+`flattened` fields.
+
+It is possible to sort on a flattened object field, as well as perform simple
+keyword-style aggregations such as `terms`. As with queries, there is no
+special support for numerics -- all values in the JSON object are treated as
+keywords. When sorting, this implies that values are compared
+lexicographically.
+
+Flattened object fields currently cannot be stored. It is not possible to
+specify the <<mapping-store,store>> parameter in the mapping.
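+As a brief illustration of the aggregation support described above, the
+following search runs a `terms` aggregation on the `labels.release` key from
+the `bug_reports` example (the aggregation name `release_labels` is arbitrary):
+
+[source,js]
+--------------------------------
+POST bug_reports/_search
+{
+  "size": 0,
+  "aggs": {
+    "release_labels": {
+      "terms": { "field": "labels.release" }
+    }
+  }
+}
+--------------------------------
+// CONSOLE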
+
+[[flattened-params]]
+==== Parameters for flattened object fields
+
+The following mapping parameters are accepted:
+
+[horizontal]
+
+<<mapping-boost,boost>>::
+
+    Mapping field-level query time boosting. Accepts a floating point number,
+    defaults to `1.0`.
+
+`depth_limit`::
+
+    The maximum allowed depth of the flattened object field, in terms of nested
+    inner objects. If a flattened object field exceeds this limit, then an
+    error will be thrown. Defaults to `20`.
+
+<<doc-values,doc_values>>::
+
+    Should the field be stored on disk in a column-stride fashion, so that it
+    can later be used for sorting, aggregations, or scripting? Accepts `true`
+    (default) or `false`.
+
+<<eager-global-ordinals,eager_global_ordinals>>::
+
+    Should global ordinals be loaded eagerly on refresh? Accepts `true` or
+    `false` (default). Enabling this is a good idea on fields that are
+    frequently used for terms aggregations.
+
+<<ignore-above,ignore_above>>::
+
+    Leaf values longer than this limit will not be indexed. By default, there
+    is no limit and all values will be indexed. Note that this limit applies
+    to the leaf values within the flattened object field, and not the length of
+    the entire field.
+
+<<mapping-index,index>>::
+
+    Determines if the field should be searchable. Accepts `true` (default) or
+    `false`.
+
+<<index-options,index_options>>::
+
+    What information should be stored in the index for scoring purposes.
+    Defaults to `docs` but can also be set to `freqs` to take term frequency
+    into account when computing scores.
+
+<<null-value,null_value>>::
+
+    A string value which is substituted for any explicit `null` values within
+    the flattened object field. Defaults to `null`, which means null fields are
+    treated as if they were missing.
+
+<<similarity,similarity>>::
+
+    Which scoring algorithm or _similarity_ should be used. Defaults
+    to `BM25`.
+
+`split_queries_on_whitespace`::
+
+    Whether <<full-text-queries,full text queries>> should split the input on
+    whitespace when building a query for this field. Accepts `true` or `false`
+    (default).
diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc
index ad442fc0b99de..adbf5f01979a0 100644
--- a/docs/reference/rest-api/info.asciidoc
+++ b/docs/reference/rest-api/info.asciidoc
@@ -71,6 +71,10 @@ Example response:
         "available" : true,
         "enabled" : true
       },
+      "flattened" : {
+        "available" : true,
+        "enabled" : true
+      },
       "graph" : {
         "available" : true,
         "enabled" : true
diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java
index 8a9fabc9e1354..9d4dbaef21125 100644
--- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java
+++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java
@@ -47,4 +47,11 @@ public interface IndexOrdinalsFieldData extends IndexFieldData.Global mapperBuilder) { multiFieldsBuilder.add(mapperBuilder); return builder; }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
index 0dc8b6a00c09e..7d8d5ef943de7 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
@@ -20,11 +20,13 @@
 package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.regex.Regex;
 
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 
@@ -36,15 +38,30 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
 
     final CopyOnWriteHashMap<String, MappedFieldType> fullNameToFieldType;
     private final CopyOnWriteHashMap<String, String> aliasToConcreteName;
+    private final CopyOnWriteHashMap<String, DynamicKeyFieldMapper> dynamicKeyMappers;
+
+    /**
+     * The maximum field depth of any mapper that implements {@link DynamicKeyFieldMapper}.
+     * Allows us to stop searching for a 'dynamic key' mapper as soon as we've passed the maximum
+     * possible field depth.
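+     * For example, if the only mapper supporting dynamic key lookups is registered under
+     * the name 'labels' (depth 1), a lookup for 'labels.some.key' only needs to test the
+     * first path element as a candidate mapper name. (The name 'labels' is purely
+     * illustrative here.)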
+     */
+    private final int maxDynamicKeyDepth;
+
     FieldTypeLookup() {
         fullNameToFieldType = new CopyOnWriteHashMap<>();
         aliasToConcreteName = new CopyOnWriteHashMap<>();
+        dynamicKeyMappers = new CopyOnWriteHashMap<>();
+        maxDynamicKeyDepth = 0;
     }
 
     private FieldTypeLookup(CopyOnWriteHashMap<String, MappedFieldType> fullNameToFieldType,
-                            CopyOnWriteHashMap<String, String> aliasToConcreteName) {
+                            CopyOnWriteHashMap<String, String> aliasToConcreteName,
+                            CopyOnWriteHashMap<String, DynamicKeyFieldMapper> dynamicKeyMappers,
+                            int maxDynamicKeyDepth) {
         this.fullNameToFieldType = fullNameToFieldType;
         this.aliasToConcreteName = aliasToConcreteName;
+        this.dynamicKeyMappers = dynamicKeyMappers;
+        this.maxDynamicKeyDepth = maxDynamicKeyDepth;
     }
 
     /**
@@ -63,14 +80,21 @@ public FieldTypeLookup copyAndAddAll(String type,
         CopyOnWriteHashMap<String, MappedFieldType> fullName = this.fullNameToFieldType;
         CopyOnWriteHashMap<String, String> aliases = this.aliasToConcreteName;
+        CopyOnWriteHashMap<String, DynamicKeyFieldMapper> dynamicKeyMappers = this.dynamicKeyMappers;
 
         for (FieldMapper fieldMapper : fieldMappers) {
+            String fieldName = fieldMapper.name();
             MappedFieldType fieldType = fieldMapper.fieldType();
             MappedFieldType fullNameFieldType = fullName.get(fieldType.name());
 
             if (Objects.equals(fieldType, fullNameFieldType) == false) {
                 fullName = fullName.copyAndPut(fieldType.name(), fieldType);
             }
+
+            if (fieldMapper instanceof DynamicKeyFieldMapper) {
+                DynamicKeyFieldMapper dynamicKeyMapper = (DynamicKeyFieldMapper) fieldMapper;
+                dynamicKeyMappers = dynamicKeyMappers.copyAndPut(fieldName, dynamicKeyMapper);
+            }
         }
 
         for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) {
@@ -83,19 +107,97 @@ public FieldTypeLookup copyAndAddAll(String type,
             }
         }
 
-        return new FieldTypeLookup(fullName, aliases);
+        int maxDynamicKeyDepth = getMaxDynamicKeyDepth(aliases, dynamicKeyMappers);
+
+        return new FieldTypeLookup(fullName, aliases, dynamicKeyMappers, maxDynamicKeyDepth);
+    }
+
+    private static int getMaxDynamicKeyDepth(CopyOnWriteHashMap<String, String> aliases,
+                                             CopyOnWriteHashMap<String, DynamicKeyFieldMapper> dynamicKeyMappers) {
+        int maxFieldDepth = 0;
+        for (Map.Entry<String, String> entry : aliases.entrySet()) {
+            String aliasName = entry.getKey();
+            String path = entry.getValue();
+            if (dynamicKeyMappers.containsKey(path)) {
+                maxFieldDepth = Math.max(maxFieldDepth, fieldDepth(aliasName));
+            }
+        }
+
+        for (String fieldName : dynamicKeyMappers.keySet()) {
+            maxFieldDepth = Math.max(maxFieldDepth, fieldDepth(fieldName));
+        }
+
+        return maxFieldDepth;
+    }
+
+    /**
+     * Computes the total depth of this field by counting the number of name segments
+     * in its path. As an example, the field 'parent1.parent2.field' has depth 3.
+     */
+    private static int fieldDepth(String field) {
+        int numDots = 0;
+        int dotIndex = -1;
+        while (true) {
+            dotIndex = field.indexOf('.', dotIndex + 1);
+            if (dotIndex < 0) {
+                break;
+            }
+            numDots++;
+        }
+        return numDots + 1;
     }
 
-    /** Returns the field for the given field */
+    /**
+     * Returns the mapped field type for the given field name.
+     */
     public MappedFieldType get(String field) {
         String concreteField = aliasToConcreteName.getOrDefault(field, field);
-        return fullNameToFieldType.get(concreteField);
+        MappedFieldType fieldType = fullNameToFieldType.get(concreteField);
+        if (fieldType != null) {
+            return fieldType;
+        }
+
+        // If the mapping contains fields that support dynamic sub-key lookup, check
+        // if this could correspond to a keyed field of the form 'path_to_field.path_to_key'.
+        return !dynamicKeyMappers.isEmpty() ? getKeyedFieldType(field) : null;
+    }
+
+    /**
+     * Check if the given field corresponds to a dynamic lookup mapper of the
+     * form 'path_to_field.path_to_key'. If so, returns a field type that
+     * can be used to perform searches on this field.
+     */
+    private MappedFieldType getKeyedFieldType(String field) {
+        int dotIndex = -1;
+        int fieldDepth = 0;
+
+        while (true) {
+            if (++fieldDepth > maxDynamicKeyDepth) {
+                return null;
+            }
+
+            dotIndex = field.indexOf('.', dotIndex + 1);
+            if (dotIndex < 0) {
+                return null;
+            }
+
+            String parentField = field.substring(0, dotIndex);
+            String concreteField = aliasToConcreteName.getOrDefault(parentField, parentField);
+            DynamicKeyFieldMapper mapper = dynamicKeyMappers.get(concreteField);
+
+            if (mapper != null) {
+                String key = field.substring(dotIndex + 1);
+                return mapper.keyedFieldType(key);
+            }
+        }
     }
 
     /**
      * Returns a list of the full names of a simple match regex like pattern against full name and index name.
      */
-    public Collection<String> simpleMatchToFullName(String pattern) {
+    public Set<String> simpleMatchToFullName(String pattern) {
         Set<String> fields = new HashSet<>();
         for (MappedFieldType fieldType : this) {
             if (Regex.simpleMatch(pattern, fieldType.name())) {
@@ -112,6 +214,20 @@ public Collection<String> simpleMatchToFullName(String pattern) {
 
     @Override
     public Iterator<MappedFieldType> iterator() {
-        return fullNameToFieldType.values().iterator();
+        Iterator<MappedFieldType> concreteFieldTypes = fullNameToFieldType.values().iterator();
+
+        if (dynamicKeyMappers.isEmpty()) {
+            return concreteFieldTypes;
+        } else {
+            Iterator<MappedFieldType> keyedFieldTypes = dynamicKeyMappers.values().stream()
+                .map(mapper -> mapper.keyedFieldType(""))
+                .iterator();
+            return Iterators.concat(concreteFieldTypes, keyedFieldTypes);
+        }
+    }
+
+    // Visible for testing.
+    int maxKeyedLookupDepth() {
+        return maxDynamicKeyDepth;
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 0c6120939ca39..259255fd6c398 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -21,7 +21,6 @@
 
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.analysis.Analyzer;
@@ -748,10 +747,10 @@ public MappedFieldType fullName(String fullName) {
      * Returns all the fields that match the given pattern. If the pattern is prefixed with a type
      * then the fields will be returned with a type prefix.
      */
-    public Collection<String> simpleMatchToFullName(String pattern) {
+    public Set<String> simpleMatchToFullName(String pattern) {
         if (Regex.isSimpleMatchPattern(pattern) == false) {
             // no wildcards
-            return Collections.singletonList(pattern);
+            return Collections.singleton(pattern);
         }
         return fieldTypes.simpleMatchToFullName(pattern);
     }
diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
index 304cab433576a..634f9d06edc11 100644
--- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
+++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
@@ -58,10 +58,10 @@
 import org.elasticsearch.transport.RemoteClusterAware;
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.BiConsumer;
 import java.util.function.BiFunction;
 import java.util.function.Function;
@@ -191,7 +191,7 @@ public Map<String, Query> copyNamedQueries() {
      * Returns all the fields that match a given pattern. If prefixed with a
      * type then the fields will be returned with a type prefix.
      */
-    public Collection<String> simpleMatchToIndexNames(String pattern) {
+    public Set<String> simpleMatchToIndexNames(String pattern) {
         return mapperService.simpleMatchToFullName(pattern);
     }
diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java
index fafe515ec09d7..8d6198e17e2cc 100644
--- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java
+++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java
@@ -30,6 +30,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Helpers to extract and expand field names and boosts
@@ -130,8 +131,9 @@ public static Map<String, Float> resolveMappingField(QueryShardContext context,
      */
     public static Map<String, Float> resolveMappingField(QueryShardContext context, String fieldOrPattern, float weight,
                                                          boolean acceptAllTypes, boolean acceptMetadataField, String fieldSuffix) {
-        Collection<String> allFields = context.simpleMatchToIndexNames(fieldOrPattern);
+        Set<String> allFields = context.simpleMatchToIndexNames(fieldOrPattern);
         Map<String, Float> fields = new HashMap<>();
+
         for (String fieldName : allFields) {
             if (fieldSuffix != null && context.fieldMapper(fieldName + fieldSuffix) != null) {
                 fieldName = fieldName + fieldSuffix;
             }
@@ -159,13 +161,17 @@ public static Map<String, Float> resolveMappingField(QueryShardContext context,
                 // other exceptions are parsing errors or not indexed fields: keep
             }
         }
-            // handle duplicates
-            float w = weight;
-            if (fields.containsKey(fieldType.name())) {
-                w *= fields.get(fieldType.name());
-            }
+
+            // Deduplicate aliases and their concrete fields.
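+            // For example, if a wildcard pattern matches both a field alias and the concrete
+            // field it points to, both resolve to the same concrete name here, so a single
+            // entry is kept and the boosts are combined by multiplication below.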
+            String resolvedFieldName = fieldType.name();
+            if (allFields.contains(resolvedFieldName)) {
+                fieldName = resolvedFieldName;
+            }
-            fields.put(fieldType.name(), w);
+
+            float w = fields.getOrDefault(fieldName, 1.0F);
+            fields.put(fieldName, w * weight);
         }
+
         checkForTooManyFields(fields, context);
         return fields;
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
index 877a8e59bc2d3..a3247a3c63001 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
@@ -262,21 +262,26 @@ Aggregator create(String name,
                 assert maxOrd != -1;
                 final double ratio = maxOrd / ((double) context.searcher().getIndexReader().numDocs());
 
+                assert valuesSource instanceof ValuesSource.Bytes.WithOrdinals;
+                ValuesSource.Bytes.WithOrdinals ordinalsValuesSource = (ValuesSource.Bytes.WithOrdinals) valuesSource;
+
                 if (factories == AggregatorFactories.EMPTY &&
                         includeExclude == null &&
                         Aggregator.descendsFromBucketAggregator(parent) == false &&
+                        ordinalsValuesSource.supportsGlobalOrdinalsMapping() &&
                         // we use the static COLLECT_SEGMENT_ORDS to allow tests to force specific optimizations
                         (COLLECT_SEGMENT_ORDS != null ? COLLECT_SEGMENT_ORDS.booleanValue() : ratio <= 0.5 && maxOrd <= 2048)) {
                     /**
                      * We can use the low cardinality execution mode iff this aggregator:
                      *  - has no sub-aggregator AND
                      *  - is not a child of a bucket aggregator AND
+                     *  - has a values source that can map from segment to global ordinals
                      *  - At least we reduce the number of global ordinals look-ups by half (ratio <= 0.5) AND
                      *  - the maximum global ordinal is less than 2048 (LOW_CARDINALITY has additional memory usage,
                      *    which is directly linked to maxOrd, so we need to limit).
                      */
                     return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories,
-                        (ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, context, parent, false,
+                        ordinalsValuesSource, order, format, bucketCountThresholds, context, parent, false,
                         subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
 
                 }
@@ -301,7 +306,7 @@ Aggregator create(String name,
                         remapGlobalOrds = false;
                     }
                 }
-                return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order,
+                return new GlobalOrdinalsStringTermsAggregator(name, factories, ordinalsValuesSource, order,
                     format, bucketCountThresholds, filter, context, parent, remapGlobalOrds, subAggCollectMode, showTermDocCountError,
                     pipelineAggregators, metaData);
             }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
index e5cfa694cbf9e..7fd38288a821b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
@@ -111,6 +111,15 @@ public abstract SortedSetDocValues ordinalsValues(LeafReaderContext context)
         public abstract SortedSetDocValues globalOrdinalsValues(LeafReaderContext context)
             throws IOException;
 
+        /**
+         * Whether this values source is able to provide a mapping between global and segment ordinals,
+         * by returning the underlying {@link OrdinalMap}.
If this method returns false, then calling + * {@link #globalOrdinalsMapping} will result in an {@link UnsupportedOperationException}. + */ + public boolean supportsGlobalOrdinalsMapping() { + return true; + } + /** Returns a mapping from segment ordinals to global ordinals. */ public abstract LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) throws IOException; @@ -153,6 +162,11 @@ public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) { return atomicFieldData.getOrdinalsValues(); } + @Override + public boolean supportsGlobalOrdinalsMapping() { + return indexFieldData.supportsGlobalOrdinalsMapping(); + } + @Override public LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) throws IOException { final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader()); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 7b17b727be2b1..bfa7cf9a9e1f1 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -39,9 +39,9 @@ import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 9394b7195320b..d01c8efe6c7c6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -63,7 +63,7 @@ public void testTermQuery() { MapperService mapperService = mock(MapperService.class); when(mapperService.fullName("_field_names")).thenReturn(fieldNamesFieldType); when(mapperService.fullName("field_name")).thenReturn(fieldType); - when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singletonList("field_name")); + when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singleton("field_name")); QueryShardContext queryShardContext = new QueryShardContext(0, indexSettings, null, null, null, mapperService, null, null, null, null, null, null, () -> 0L, null); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 99c7a1e7fdc14..0b870127b9794 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -150,7 +150,8 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName()); if (query instanceof TermQuery && fieldType != null) { String queryValue = 
queryBuilder.value().toString(); - if (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple")) { + if (isTextField(queryBuilder.fieldName()) + && (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple"))) { queryValue = queryValue.toLowerCase(Locale.ROOT); } Query expectedTermQuery = fieldType.termQuery(queryValue, context); diff --git a/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java b/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java index ebd0bf0460aeb..5bbc4cb389f29 100644 --- a/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java +++ b/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Before; @@ -33,6 +32,7 @@ import java.util.List; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -55,7 +55,7 @@ public void testZeroTermsQuery() throws ExecutionException, InterruptedException List indexRequests = getIndexRequests(); indexRandom(true, false, indexRequests); - MatchPhraseQueryBuilder baseQuery = QueryBuilders.matchPhraseQuery("name", "the who") + MatchPhraseQueryBuilder baseQuery = matchPhraseQuery("name", "the who") .analyzer("standard_stopwords"); MatchPhraseQueryBuilder matchNoneQuery = baseQuery.zeroTermsQuery(ZeroTermsQuery.NONE); @@ -67,7 +67,6 @@ public void testZeroTermsQuery() throws ExecutionException, InterruptedException assertHitCount(matchAllResponse, 2L); } - private List getIndexRequests() { List requests = new ArrayList<>(); requests.add(client().prepareIndex(INDEX, "band").setSource("name", "the beatles")); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index ee17b70f737c7..65952e7048ab0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -42,7 +42,6 @@ import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -71,6 +70,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase @@ -138,7 +138,8 @@ public void setupSuiteScopeCluster() throws Exception { .startArray(MULTI_VALUED_FIELD_NAME) .value("val" + i) .value("val" + (i + 1)) - .endArray().endObject())); + .endArray() + .endObject())); } getMultiSortDocs(builders); @@ -574,7 +575,7 @@ public void 
testSingleValuedFieldOrderedByIllegalAgg() throws Exception { ElasticsearchException rootCause = rootCauses[0]; if (rootCause instanceof AggregationExecutionException) { AggregationExecutionException aggException = (AggregationExecutionException) rootCause; - assertThat(aggException.getMessage(), Matchers.startsWith("Invalid aggregation order path")); + assertThat(aggException.getMessage(), startsWith("Invalid aggregation order path")); } else { throw e; } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java index 1f4253825667a..07ae5b40dce9c 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java @@ -49,13 +49,7 @@ public void setUp() throws Exception { when(mapperService.fullName("alias")).thenReturn(fieldType); docValues = mock(ScriptDocValues.class); - - AtomicFieldData atomicFieldData = mock(AtomicFieldData.class); - doReturn(docValues).when(atomicFieldData).getScriptValues(); - - IndexFieldData fieldData = mock(IndexFieldData.class); - when(fieldData.getFieldName()).thenReturn("field"); - doReturn(atomicFieldData).when(fieldData).load(anyObject()); + IndexFieldData fieldData = createFieldData(docValues); docLookup = new LeafDocLookup(mapperService, ignored -> fieldData, @@ -67,8 +61,19 @@ public void testBasicLookup() { assertEquals(docValues, fetchedDocValues); } - public void testLookupWithFieldAlias() { + public void testFieldAliases() { ScriptDocValues fetchedDocValues = docLookup.get("alias"); assertEquals(docValues, fetchedDocValues); } + + private IndexFieldData createFieldData(ScriptDocValues scriptDocValues) { + AtomicFieldData atomicFieldData = mock(AtomicFieldData.class); + doReturn(scriptDocValues).when(atomicFieldData).getScriptValues(); + + IndexFieldData fieldData = mock(IndexFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + doReturn(atomicFieldData).when(fieldData).load(anyObject()); + + return fieldData; + } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index 8d5933e6cfc75..0498cc63fb9f9 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -359,6 +359,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { assertHits(response.getHits(), "1"); } + private void assertHits(SearchHits hits, String... 
ids) { assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 13f03472923d2..e7675a9cbb146 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -642,7 +642,6 @@ public void testFieldAliasWithWildcardField() throws Exception { assertHits(response.getHits(), "2", "3"); } - public void testFieldAliasOnDisallowedFieldType() throws Exception { String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 99c1e2e91715e..859fc074e1796 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -696,6 +696,15 @@ public synchronized boolean isJdbcAllowed() { return licensed && localStatus.active; } + /** + * Determine if support for flattened object fields should be enabled. + *

    + * Flattened fields are available for all license types except {@link OperationMode#MISSING}. + */ + public synchronized boolean isFlattenedAllowed() { + return status.active; + } + /** * Determine if Vectors support should be enabled. *

    diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 6f63c7895f1a0..87d6795638a10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.core.dataframe.transforms.SyncConfig; import org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; +import org.elasticsearch.xpack.core.flattened.FlattenedFeatureSetUsage; import org.elasticsearch.xpack.core.graph.GraphFeatureSetUsage; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; @@ -486,6 +487,7 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(Task.Status.class, DataFrameField.TASK_NAME, DataFrameTransformState::new), new NamedWriteableRegistry.Entry(PersistentTaskState.class, DataFrameField.TASK_NAME, DataFrameTransformState::new), new NamedWriteableRegistry.Entry(SyncConfig.class, DataFrameField.TIME_BASED_SYNC.getPreferredName(), TimeSyncConfig::new), + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.FLATTENED, FlattenedFeatureSetUsage::new), // Vectors new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.VECTORS, VectorsFeatureSetUsage::new), // Voting Only Node diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 54e0f58ae2867..351606e321873 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -37,6 +37,8 @@ public final class XPackField { public static final String CCR = "ccr"; /** Name constant for the data frame feature. */ public static final String DATA_FRAME = "data_frame"; + /** Name constant for flattened fields. */ + public static final String FLATTENED = "flattened"; /** Name constant for the vectors feature. */ public static final String VECTORS = "vectors"; /** Name constant for the voting-only-node feature. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 95184d02e1ead..ad11f103f33ea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -114,6 +114,10 @@ private XPackSettings() { /** Setting for enabling or disabling sql. Defaults to true. */ public static final Setting SQL_ENABLED = Setting.boolSetting("xpack.sql.enabled", true, Setting.Property.NodeScope); + /** Setting for enabling or disabling flattened fields. Defaults to true. */ + public static final Setting FLATTENED_ENABLED = Setting.boolSetting("xpack.flattened.enabled", + true, Setting.Property.NodeScope); + /** Setting for enabling or disabling vectors. Defaults to true. 
*/ public static final Setting VECTORS_ENABLED = Setting.boolSetting("xpack.vectors.enabled", true, Setting.Property.NodeScope); @@ -207,6 +211,7 @@ public static List> getAllSettings() { settings.add(PASSWORD_HASHING_ALGORITHM); settings.add(INDEX_LIFECYCLE_ENABLED); settings.add(DATA_FRAME_ENABLED); + settings.add(FLATTENED_ENABLED); settings.add(VECTORS_ENABLED); return Collections.unmodifiableList(settings); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java index 81fe7c7f0af21..fd8b2853ee85a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java @@ -33,12 +33,13 @@ public class XPackInfoFeatureAction extends Action { public static final XPackInfoFeatureAction INDEX_LIFECYCLE = new XPackInfoFeatureAction(XPackField.INDEX_LIFECYCLE); public static final XPackInfoFeatureAction CCR = new XPackInfoFeatureAction(XPackField.CCR); public static final XPackInfoFeatureAction DATA_FRAME = new XPackInfoFeatureAction(XPackField.DATA_FRAME); + public static final XPackInfoFeatureAction FLATTENED = new XPackInfoFeatureAction(XPackField.FLATTENED); public static final XPackInfoFeatureAction VECTORS = new XPackInfoFeatureAction(XPackField.VECTORS); public static final XPackInfoFeatureAction VOTING_ONLY = new XPackInfoFeatureAction(XPackField.VOTING_ONLY); public static final List ALL = Arrays.asList( - SECURITY, MONITORING, WATCHER, GRAPH, MACHINE_LEARNING, LOGSTASH, SQL, ROLLUP, INDEX_LIFECYCLE, CCR, DATA_FRAME, VECTORS, - VOTING_ONLY + SECURITY, MONITORING, WATCHER, GRAPH, MACHINE_LEARNING, LOGSTASH, SQL, ROLLUP, INDEX_LIFECYCLE, CCR, DATA_FRAME, FLATTENED, + VECTORS, VOTING_ONLY ); private XPackInfoFeatureAction(String name) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index 7721f9070a6d5..e1d473f7dcf9e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -33,12 +33,13 @@ public class XPackUsageFeatureAction extends Action { public static final XPackUsageFeatureAction INDEX_LIFECYCLE = new XPackUsageFeatureAction(XPackField.INDEX_LIFECYCLE); public static final XPackUsageFeatureAction CCR = new XPackUsageFeatureAction(XPackField.CCR); public static final XPackUsageFeatureAction DATA_FRAME = new XPackUsageFeatureAction(XPackField.DATA_FRAME); + public static final XPackUsageFeatureAction FLATTENED = new XPackUsageFeatureAction(XPackField.FLATTENED); public static final XPackUsageFeatureAction VECTORS = new XPackUsageFeatureAction(XPackField.VECTORS); public static final XPackUsageFeatureAction VOTING_ONLY = new XPackUsageFeatureAction(XPackField.VOTING_ONLY); public static final List ALL = Arrays.asList( - SECURITY, MONITORING, WATCHER, GRAPH, MACHINE_LEARNING, LOGSTASH, SQL, ROLLUP, INDEX_LIFECYCLE, CCR, DATA_FRAME, VECTORS, - VOTING_ONLY + SECURITY, MONITORING, WATCHER, GRAPH, MACHINE_LEARNING, LOGSTASH, SQL, ROLLUP, INDEX_LIFECYCLE, CCR, DATA_FRAME, FLATTENED, + VECTORS, VOTING_ONLY ); private XPackUsageFeatureAction(String name) { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/flattened/FlattenedFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/flattened/FlattenedFeatureSetUsage.java new file mode 100644 index 0000000000000..fe1a9bdadfdc1 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/flattened/FlattenedFeatureSetUsage.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.flattened; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; + +import java.io.IOException; + +public class FlattenedFeatureSetUsage extends XPackFeatureSet.Usage { + + public FlattenedFeatureSetUsage(StreamInput input) throws IOException { + super(input); + } + + public FlattenedFeatureSetUsage(boolean available, boolean enabled) { + super(XPackField.FLATTENED, available, enabled); + } +} diff --git a/x-pack/plugin/mapper-flattened/build.gradle b/x-pack/plugin/mapper-flattened/build.gradle new file mode 100644 index 0000000000000..c9e1c408304ea --- /dev/null +++ b/x-pack/plugin/mapper-flattened/build.gradle @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +evaluationDependsOn(xpackModule('core')) + +apply plugin: 'elasticsearch.esplugin' + +esplugin { + name 'flattened' + description 'Module for the flattened field type, which allows JSON objects to be flattened into a single field.' + classname 'org.elasticsearch.xpack.flattened.FlattenedMapperPlugin' + extendedPlugins = ['x-pack-core'] +} +archivesBaseName = 'x-pack-flattened' + +dependencies { + compileOnly project(path: xpackModule('core'), configuration: 'default') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } +} + +integTest.enabled = false diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedInfoTransportAction.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedInfoTransportAction.java new file mode 100644 index 0000000000000..abdfd8c4b85c9 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedInfoTransportAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.flattened; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; + +public class FlattenedInfoTransportAction extends XPackInfoFeatureTransportAction { + + private final boolean enabled; + private final XPackLicenseState licenseState; + + @Inject + public FlattenedInfoTransportAction(TransportService transportService, ActionFilters actionFilters, + Settings settings, XPackLicenseState licenseState) { + super(XPackInfoFeatureAction.FLATTENED.name(), transportService, actionFilters); + this.enabled = XPackSettings.FLATTENED_ENABLED.get(settings); + this.licenseState = licenseState; + } + + @Override + public String name() { + return XPackField.FLATTENED; + } + + @Override + public boolean available() { + return licenseState.isFlattenedAllowed(); + } + + @Override + public boolean enabled() { + return enabled; + } + +} diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedMapperPlugin.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedMapperPlugin.java new file mode 100644 index 0000000000000..1dd407f70d3a2 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedMapperPlugin.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+package org.elasticsearch.xpack.flattened;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.plugins.ActionPlugin;
+import org.elasticsearch.plugins.MapperPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
+import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
+import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonMap;
+
+public class FlattenedMapperPlugin extends Plugin implements MapperPlugin, ActionPlugin {
+
+    protected final boolean enabled;
+
+    public FlattenedMapperPlugin(Settings settings) {
+        this.enabled = XPackSettings.FLATTENED_ENABLED.get(settings);
+    }
+
+    @Override
+    public Map<String, Mapper.TypeParser> getMappers() {
+        if (enabled == false) {
+            return emptyMap();
+        }
+        return singletonMap(FlatObjectFieldMapper.CONTENT_TYPE, new FlatObjectFieldMapper.TypeParser());
+    }
+
+    @Override
+    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
+        return Arrays.asList(
+            new ActionHandler<>(XPackUsageFeatureAction.FLATTENED, FlattenedUsageTransportAction.class),
+            new ActionHandler<>(XPackInfoFeatureAction.FLATTENED, FlattenedInfoTransportAction.class));
+    }
+}
diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedUsageTransportAction.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedUsageTransportAction.java
new file mode 100644
index 0000000000000..c928e3c5f5dcc
--- /dev/null
+++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/FlattenedUsageTransportAction.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ + +package org.elasticsearch.xpack.flattened; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.flattened.FlattenedFeatureSetUsage; + +public class FlattenedUsageTransportAction extends XPackUsageFeatureTransportAction { + + private final Settings settings; + private final XPackLicenseState licenseState; + + @Inject + public FlattenedUsageTransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + Settings settings, XPackLicenseState licenseState) { + super(XPackUsageFeatureAction.FLATTENED.name(), transportService, clusterService, + threadPool, actionFilters, indexNameExpressionResolver); + this.settings = settings; + this.licenseState = licenseState; + } + + @Override + protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, + ActionListener listener) { + FlattenedFeatureSetUsage usage = + new FlattenedFeatureSetUsage(licenseState.isFlattenedAllowed(), XPackSettings.FLATTENED_ENABLED.get(settings)); + listener.onResponse(new XPackUsageFeatureResponse(usage)); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java new file mode 100644 index 0000000000000..eef76f0807bfb --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java @@ -0,0 +1,616 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.OrdinalMap; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalyzerScope; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; +import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; +import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData; +import org.elasticsearch.index.mapper.DynamicKeyFieldMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.StringFieldType; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.MultiValueMode; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.TypeParsers.parseField; + +/** + * A field mapper that accepts a JSON object and flattens it into a single field. This data type + * can be a useful alternative to an 'object' mapping when the object has a large, unknown set + * of keys. + * + * Currently the mapper extracts all leaf values of the JSON object, converts them to their text + * representations, and indexes each one as a keyword. 
It creates both a 'keyed' version of the token + * to allow searches on particular key-value pairs, as well as a 'root' token without the key + * + * As an example, given a flat object field called 'flat_object' and the following input + * + * { + * "flat_object": { + * "key1": "some value", + * "key2": { + * "key3": true + * } + * } + * } + * + * the mapper will produce untokenized string fields with the name "flat_object" and values + * "some value" and "true", as well as string fields called "flat_object._keyed" with values + * "key\0some value" and "key2.key3\0true". Note that \0 is used as a reserved separator + * character (see {@link FlatObjectFieldParser#SEPARATOR}). + */ +public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper { + + public static final String CONTENT_TYPE = "flattened"; + private static final String KEYED_FIELD_SUFFIX = "._keyed"; + + private static class Defaults { + public static final MappedFieldType FIELD_TYPE = new RootFlatObjectFieldType(); + + static { + FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setStored(false); + FIELD_TYPE.setHasDocValues(true); + FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.freeze(); + } + + public static final int DEPTH_LIMIT = 20; + public static final int IGNORE_ABOVE = Integer.MAX_VALUE; + } + + public static class Builder extends FieldMapper.Builder { + private int depthLimit = Defaults.DEPTH_LIMIT; + private int ignoreAbove = Defaults.IGNORE_ABOVE; + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); + builder = this; + } + + @Override + public RootFlatObjectFieldType fieldType() { + return (RootFlatObjectFieldType) super.fieldType(); + } + + @Override + public Builder indexOptions(IndexOptions indexOptions) { + if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) > 0) { + throw new IllegalArgumentException("The [" + CONTENT_TYPE + + "] field does not support positions, got [index_options]=" + + indexOptionToString(indexOptions)); + } + return super.indexOptions(indexOptions); + } + + public Builder depthLimit(int depthLimit) { + if (depthLimit < 0) { + throw new IllegalArgumentException("[depth_limit] must be positive, got " + depthLimit); + } + this.depthLimit = depthLimit; + return this; + } + + public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) { + fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals); + return builder; + } + + public Builder ignoreAbove(int ignoreAbove) { + if (ignoreAbove < 0) { + throw new IllegalArgumentException("[ignore_above] must be positive, got " + ignoreAbove); + } + this.ignoreAbove = ignoreAbove; + return this; + } + + public Builder splitQueriesOnWhitespace(boolean splitQueriesOnWhitespace) { + fieldType().setSplitQueriesOnWhitespace(splitQueriesOnWhitespace); + return builder; + } + + @Override + public Builder addMultiField(Mapper.Builder mapperBuilder) { + throw new UnsupportedOperationException("[fields] is not supported for [" + CONTENT_TYPE + "] fields."); + } + + @Override + public Builder copyTo(CopyTo copyTo) { + throw new UnsupportedOperationException("[copy_to] is not supported for [" + CONTENT_TYPE + "] fields."); + } + + @Override + public Builder store(boolean store) { + throw new UnsupportedOperationException("[store] is not supported for [" + CONTENT_TYPE + "] fields."); + } + + @Override + public FlatObjectFieldMapper build(BuilderContext context) { + setupFieldType(context); + if (fieldType().splitQueriesOnWhitespace()) { + NamedAnalyzer whitespaceAnalyzer = 
+                fieldType().setSearchAnalyzer(whitespaceAnalyzer);
+            }
+            return new FlatObjectFieldMapper(name, fieldType, defaultFieldType,
+                ignoreAbove, depthLimit, context.indexSettings());
+        }
+    }
+
+    public static class TypeParser implements Mapper.TypeParser {
+        @Override
+        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+            Builder builder = new Builder(name);
+            parseField(builder, name, node, parserContext);
+            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
+                Map.Entry<String, Object> entry = iterator.next();
+                String propName = entry.getKey();
+                Object propNode = entry.getValue();
+                if (propName.equals("depth_limit")) {
+                    builder.depthLimit(XContentMapValues.nodeIntegerValue(propNode, -1));
+                    iterator.remove();
+                } else if (propName.equals("eager_global_ordinals")) {
+                    builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode, "eager_global_ordinals"));
+                    iterator.remove();
+                } else if (propName.equals("ignore_above")) {
+                    builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
+                    iterator.remove();
+                } else if (propName.equals("null_value")) {
+                    if (propNode == null) {
+                        throw new MapperParsingException("Property [null_value] cannot be null.");
+                    }
+                    builder.nullValue(propNode.toString());
+                    iterator.remove();
+                } else if (propName.equals("split_queries_on_whitespace")) {
+                    builder.splitQueriesOnWhitespace(XContentMapValues.nodeBooleanValue(propNode, "split_queries_on_whitespace"));
+                    iterator.remove();
+                }
+            }
+            return builder;
+        }
+    }
+
+    /**
+     * A field type that represents the values under a particular JSON key, used
+     * when searching under a specific key as in 'my_flat_object.key: some_value'.
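+     *
+     * As a rough sketch of the mechanics (the details live in {@link FlatObjectFieldParser}),
+     * a term query on 'my_flat_object.key' for the value 'some_value' runs against the hidden
+     * '._keyed' sub-field, using the key-prefixed term 'key\0some_value'.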
+ */ + public static final class KeyedFlatObjectFieldType extends StringFieldType { + private final String key; + private boolean splitQueriesOnWhitespace; + + public KeyedFlatObjectFieldType(String key) { + setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + this.key = key; + } + + public KeyedFlatObjectFieldType clone() { + return new KeyedFlatObjectFieldType(this); + } + + private KeyedFlatObjectFieldType(KeyedFlatObjectFieldType ref) { + super(ref); + this.key = ref.key; + this.splitQueriesOnWhitespace = ref.splitQueriesOnWhitespace; + } + + private KeyedFlatObjectFieldType(String name, String key, RootFlatObjectFieldType ref) { + super(ref); + setName(name); + this.key = key; + this.splitQueriesOnWhitespace = ref.splitQueriesOnWhitespace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + KeyedFlatObjectFieldType that = (KeyedFlatObjectFieldType) o; + return splitQueriesOnWhitespace == that.splitQueriesOnWhitespace; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), splitQueriesOnWhitespace); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + public String key() { + return key; + } + + public boolean splitQueriesOnWhitespace() { + return splitQueriesOnWhitespace; + } + + public void setSplitQueriesOnWhitespace(boolean splitQueriesOnWhitespace) { + checkIfFrozen(); + this.splitQueriesOnWhitespace = splitQueriesOnWhitespace; + } + + @Override + public Query existsQuery(QueryShardContext context) { + Term term = new Term(name(), FlatObjectFieldParser.createKeyedValue(key, "")); + return new PrefixQuery(term); + } + + @Override + public Query rangeQuery(Object lowerTerm, + Object upperTerm, + boolean includeLower, + boolean includeUpper, + QueryShardContext context) { + + // We require range queries to specify both bounds because an unbounded query could incorrectly match + // values from other keys. For example, a query on the 'first' key with only a lower bound would become + // ("first\0value", null), which would also match the value "second\0value" belonging to the key 'second'. 
+ if (lowerTerm == null || upperTerm == null) { + throw new IllegalArgumentException("[range] queries on keyed [" + CONTENT_TYPE + + "] fields must include both an upper and a lower bound."); + } + + return super.rangeQuery(lowerTerm, upperTerm, + includeLower, includeUpper, context); + } + + @Override + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, + boolean transpositions) { + throw new UnsupportedOperationException("[fuzzy] queries are not currently supported on keyed " + + "[" + CONTENT_TYPE + "] fields."); + } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, + MultiTermQuery.RewriteMethod method, QueryShardContext context) { + throw new UnsupportedOperationException("[regexp] queries are not currently supported on keyed " + + "[" + CONTENT_TYPE + "] fields."); + } + + @Override + public Query wildcardQuery(String value, + MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + throw new UnsupportedOperationException("[wildcard] queries are not currently supported on keyed " + + "[" + CONTENT_TYPE + "] fields."); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + if (value == null) { + return null; + } + + String stringValue = value instanceof BytesRef + ? ((BytesRef) value).utf8ToString() + : value.toString(); + String keyedValue = FlatObjectFieldParser.createKeyedValue(key, stringValue); + return new BytesRef(keyedValue); + } + + @Override + public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { + failIfNoDocValues(); + return new KeyedFlatObjectFieldData.Builder(key); + } + } + + /** + * A field data implementation that gives access to the values associated with + * a particular JSON key. + * + * This class wraps the field data that is built directly on the keyed flat object field, + * and filters out values whose prefix doesn't match the requested key. Loading and caching + * is fully delegated to the wrapped field data, so that different {@link KeyedFlatObjectFieldData} + * for the same flat object field share the same global ordinals. + * + * Because of the code-level complexity it would introduce, it is currently not possible + * to retrieve the underlying global ordinals map through {@link #getOrdinalMap()}. 
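+     *
+     * Sorting goes through {@link #sortField}, which builds its comparator source
+     * on this wrapper, so only values under the requested key take part in the sort.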
+ */ + public static class KeyedFlatObjectFieldData implements IndexOrdinalsFieldData { + private final String key; + private final IndexOrdinalsFieldData delegate; + + private KeyedFlatObjectFieldData(String key, IndexOrdinalsFieldData delegate) { + this.delegate = delegate; + this.key = key; + } + + public String getKey() { + return key; + } + + @Override + public String getFieldName() { + return delegate.getFieldName(); + } + + @Override + public SortField sortField(Object missingValue, + MultiValueMode sortMode, + XFieldComparatorSource.Nested nested, + boolean reverse) { + XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); + return new SortField(getFieldName(), source, reverse); + } + + @Override + public void clear() { + delegate.clear(); + } + + @Override + public AtomicOrdinalsFieldData load(LeafReaderContext context) { + AtomicOrdinalsFieldData fieldData = delegate.load(context); + return new KeyedFlatObjectAtomicFieldData(key, fieldData); + } + + @Override + public AtomicOrdinalsFieldData loadDirect(LeafReaderContext context) throws Exception { + AtomicOrdinalsFieldData fieldData = delegate.loadDirect(context); + return new KeyedFlatObjectAtomicFieldData(key, fieldData); + } + + @Override + public IndexOrdinalsFieldData loadGlobal(DirectoryReader indexReader) { + IndexOrdinalsFieldData fieldData = delegate.loadGlobal(indexReader); + return new KeyedFlatObjectFieldData(key, fieldData); + } + + @Override + public IndexOrdinalsFieldData localGlobalDirect(DirectoryReader indexReader) throws Exception { + IndexOrdinalsFieldData fieldData = delegate.localGlobalDirect(indexReader); + return new KeyedFlatObjectFieldData(key, fieldData); + } + + @Override + public OrdinalMap getOrdinalMap() { + throw new UnsupportedOperationException("The field data for the flat object field [" + + delegate.getFieldName() + "] does not allow access to the underlying ordinal map."); + } + + @Override + public boolean supportsGlobalOrdinalsMapping() { + return false; + } + + @Override + public Index index() { + return delegate.index(); + } + + public static class Builder implements IndexFieldData.Builder { + private final String key; + + Builder(String key) { + this.key = key; + } + + @Override + public IndexFieldData build(IndexSettings indexSettings, + MappedFieldType fieldType, + IndexFieldDataCache cache, + CircuitBreakerService breakerService, + MapperService mapperService) { + String fieldName = fieldType.name(); + IndexOrdinalsFieldData delegate = new SortedSetDVOrdinalsIndexFieldData(indexSettings, + cache, fieldName, breakerService, AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION); + return new KeyedFlatObjectFieldData(key, delegate); + } + } + } + + /** + * A field type that represents all 'root' values. This field type is used in + * searches on the flat object field itself, e.g. 'my_flat_object: some_value'. 
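+     *
+     * Each leaf value is also indexed into the root field without a key prefix,
+     * so a query on the root field matches a value regardless of which key it
+     * falls under.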
+ */ + public static final class RootFlatObjectFieldType extends StringFieldType { + private boolean splitQueriesOnWhitespace; + + public RootFlatObjectFieldType() { + setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + } + + private RootFlatObjectFieldType(RootFlatObjectFieldType ref) { + super(ref); + this.splitQueriesOnWhitespace = ref.splitQueriesOnWhitespace; + } + + public RootFlatObjectFieldType clone() { + return new RootFlatObjectFieldType(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + RootFlatObjectFieldType that = (RootFlatObjectFieldType) o; + return splitQueriesOnWhitespace == that.splitQueriesOnWhitespace; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), splitQueriesOnWhitespace); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + public boolean splitQueriesOnWhitespace() { + return splitQueriesOnWhitespace; + } + + public void setSplitQueriesOnWhitespace(boolean splitQueriesOnWhitespace) { + checkIfFrozen(); + this.splitQueriesOnWhitespace = splitQueriesOnWhitespace; + } + + @Override + public Object valueForDisplay(Object value) { + if (value == null) { + return null; + } + BytesRef binaryValue = (BytesRef) value; + return binaryValue.utf8ToString(); + } + + @Override + public Query existsQuery(QueryShardContext context) { + if (hasDocValues()) { + return new DocValuesFieldExistsQuery(name()); + } else { + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + } + } + + @Override + public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { + failIfNoDocValues(); + return new DocValuesIndexFieldData.Builder(); + } + } + + private final FlatObjectFieldParser fieldParser; + private int depthLimit; + private int ignoreAbove; + + private FlatObjectFieldMapper(String simpleName, + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + int ignoreAbove, + int depthLimit, + Settings indexSettings) { + super(simpleName, fieldType, defaultFieldType, indexSettings, CopyTo.empty()); + assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; + + this.depthLimit = depthLimit; + this.ignoreAbove = ignoreAbove; + this.fieldParser = new FlatObjectFieldParser(fieldType.name(), keyedFieldName(), + fieldType, depthLimit, ignoreAbove); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected void doMerge(Mapper mergeWith) { + super.doMerge(mergeWith); + this.ignoreAbove = ((FlatObjectFieldMapper) mergeWith).ignoreAbove; + } + + @Override + protected FlatObjectFieldMapper clone() { + return (FlatObjectFieldMapper) super.clone(); + } + + @Override + public RootFlatObjectFieldType fieldType() { + return (RootFlatObjectFieldType) super.fieldType(); + } + + @Override + public KeyedFlatObjectFieldType keyedFieldType(String key) { + return new KeyedFlatObjectFieldType(keyedFieldName(), key, fieldType()); + } + + public String keyedFieldName() { + return fieldType.name() + KEYED_FIELD_SUFFIX; + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) { + return; + } + + if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.hasDocValues()) { + context.parser().skipChildren(); + return; + } + + XContentParser 
xContentParser = context.parser();
+        fields.addAll(fieldParser.parse(xContentParser));
+
+        if (!fieldType.hasDocValues()) {
+            createFieldNamesField(context, fields);
+        }
+    }
+
+    @Override
+    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
+        super.doXContentBody(builder, includeDefaults, params);
+
+        if (includeDefaults || depthLimit != Defaults.DEPTH_LIMIT) {
+            builder.field("depth_limit", depthLimit);
+        }
+
+        if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
+            builder.field("ignore_above", ignoreAbove);
+        }
+
+        if (includeDefaults || fieldType().nullValue() != null) {
+            builder.field("null_value", fieldType().nullValue());
+        }
+
+        if (includeDefaults || fieldType().splitQueriesOnWhitespace()) {
+            builder.field("split_queries_on_whitespace", fieldType().splitQueriesOnWhitespace());
+        }
+    }
+}
diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParser.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParser.java
new file mode 100644
index 0000000000000..a855440b2be71
--- /dev/null
+++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParser.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.flattened.mapper;
+
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.MappedFieldType;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A helper class for {@link FlatObjectFieldMapper} that parses a JSON object
+ * and produces a pair of indexable fields for each leaf value.
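+ *
+ * For example, given the input { "parent": { "key": "value" } }, the parser emits
+ * the raw value 'value' for the root field and the prefixed value
+ * 'parent.key\0value' for the keyed field.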
+ */ +class FlatObjectFieldParser { + static final String SEPARATOR = "\0"; + private static final byte SEPARATOR_BYTE = '\0'; + + private final String rootFieldName; + private final String keyedFieldName; + + private final MappedFieldType fieldType; + private final int depthLimit; + private final int ignoreAbove; + + FlatObjectFieldParser(String rootFieldName, + String keyedFieldName, + MappedFieldType fieldType, + int depthLimit, + int ignoreAbove) { + this.rootFieldName = rootFieldName; + this.keyedFieldName = keyedFieldName; + this.fieldType = fieldType; + this.depthLimit = depthLimit; + this.ignoreAbove = ignoreAbove; + } + + public List parse(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, + parser.currentToken(), + parser::getTokenLocation); + + ContentPath path = new ContentPath(); + List fields = new ArrayList<>(); + + parseObject(parser, path, fields); + return fields; + } + + private void parseObject(XContentParser parser, + ContentPath path, + List fields) throws IOException { + String currentName = null; + while (true) { + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.END_OBJECT) { + return; + } + + if (token == XContentParser.Token.FIELD_NAME) { + currentName = parser.currentName(); + } else { + assert currentName != null; + parseFieldValue(token, parser, path, currentName, fields); + } + } + } + + private void parseArray(XContentParser parser, + ContentPath path, + String currentName, + List fields) throws IOException { + while (true) { + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.END_ARRAY) { + return; + } + parseFieldValue(token, parser, path, currentName, fields); + } + } + + private void parseFieldValue(XContentParser.Token token, + XContentParser parser, + ContentPath path, + String currentName, + List fields) throws IOException { + if (token == XContentParser.Token.START_OBJECT) { + path.add(currentName); + validateDepthLimit(path); + parseObject(parser, path, fields); + path.remove(); + } else if (token == XContentParser.Token.START_ARRAY) { + parseArray(parser, path, currentName, fields); + } else if (token.isValue()) { + String value = parser.text(); + addField(path, currentName, value, fields); + } else if (token == XContentParser.Token.VALUE_NULL) { + if (fieldType.nullValueAsString() != null) { + addField(path, currentName, fieldType.nullValueAsString(), fields); + } + } else { + // Note that we throw an exception here just to be safe. We don't actually expect to reach + // this case, since XContentParser verifies that the input is well-formed as it parses. + throw new IllegalArgumentException("Encountered unexpected token [" + token.toString() + "]."); + } + } + + private void addField(ContentPath path, + String currentName, + String value, + List fields) { + if (value.length() > ignoreAbove) { + return; + } + + String key = path.pathAsText(currentName); + if (key.contains(SEPARATOR)) { + throw new IllegalArgumentException("Keys in [flattened] fields cannot contain the reserved character \\0." 
+ + " Offending key: [" + key + "]."); + } + String keyedValue = createKeyedValue(key, value); + + if (fieldType.indexOptions() != IndexOptions.NONE) { + fields.add(new StringField(rootFieldName, new BytesRef(value), Field.Store.NO)); + fields.add(new StringField(keyedFieldName, new BytesRef(keyedValue), Field.Store.NO)); + } + + if (fieldType.hasDocValues()) { + fields.add(new SortedSetDocValuesField(rootFieldName, new BytesRef(value))); + fields.add(new SortedSetDocValuesField(keyedFieldName, new BytesRef(keyedValue))); + } + } + + private void validateDepthLimit(ContentPath path) { + if (path.length() + 1 > depthLimit) { + throw new IllegalArgumentException("The provided [flattened] field [" + rootFieldName +"]" + + " exceeds the maximum depth limit of [" + depthLimit + "]."); + } + } + + static String createKeyedValue(String key, String value) { + return key + SEPARATOR + value; + } + + static BytesRef extractKey(BytesRef keyedValue) { + int length; + for (length = 0; length < keyedValue.length; length++){ + if (keyedValue.bytes[keyedValue.offset + length] == SEPARATOR_BYTE) { + break; + } + } + return new BytesRef(keyedValue.bytes, keyedValue.offset, length); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldData.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldData.java new file mode 100644 index 0000000000000..7f5a5235095b8 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldData.java @@ -0,0 +1,251 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; +import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; +import org.elasticsearch.index.fielddata.FieldData; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collection; + +/** + * The atomic field data implementation for {@link FlatObjectFieldMapper.KeyedFlatObjectFieldType}. + * + * This class wraps the field data that is built directly on the keyed flat object field, + * and filters out values whose prefix doesn't match the requested key. + * + * In order to support all usage patterns, the delegate's ordinal values are shifted + * to range from 0 to the number of total values. 
+ */ +public class KeyedFlatObjectAtomicFieldData implements AtomicOrdinalsFieldData { + + private final String key; + private final AtomicOrdinalsFieldData delegate; + + KeyedFlatObjectAtomicFieldData(String key, + AtomicOrdinalsFieldData delegate) { + this.key = key; + this.delegate = delegate; + } + + @Override + public long ramBytesUsed() { + return delegate.ramBytesUsed(); + } + + @Override + public Collection getChildResources() { + return delegate.getChildResources(); + } + + @Override + public SortedSetDocValues getOrdinalsValues() { + BytesRef keyBytes = new BytesRef(key); + SortedSetDocValues values = delegate.getOrdinalsValues(); + + long minOrd, maxOrd; + try { + minOrd = findMinOrd(keyBytes, values); + if (minOrd < 0) { + return DocValues.emptySortedSet(); + } + maxOrd = findMaxOrd(keyBytes, values); + assert maxOrd >= 0; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + return new KeyedJsonDocValues(keyBytes, values, minOrd, maxOrd); + } + + @Override + public void close() { + delegate.close(); + } + + @Override + public ScriptDocValues getScriptValues() { + return AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION + .apply(getOrdinalsValues()); + } + + @Override + public SortedBinaryDocValues getBytesValues() { + return FieldData.toString(getOrdinalsValues()); + } + + /** + * Performs a binary search to find the first term with 'key' as a prefix. + */ + static long findMinOrd(BytesRef key, SortedSetDocValues delegate) throws IOException { + long low = 0; + long high = delegate.getValueCount() - 1; + + long result = -1; + while (low <= high) { + long mid = (low + high) >>> 1; + final BytesRef term = delegate.lookupOrd(mid); + int cmp = compare(key, term); + + if (cmp == 0) { + result = mid; + high = mid - 1; + } else if (cmp < 0) { + high = mid - 1; + } else { + low = mid + 1; + } + } + return result; + } + + /** + * Performs a binary search to find the last term with 'key' as a prefix. + */ + static long findMaxOrd(BytesRef key, SortedSetDocValues delegate) throws IOException { + long low = 0; + long high = delegate.getValueCount() - 1; + + long result = -1; + while (low <= high) { + long mid = (low + high) >>> 1; + final BytesRef term = delegate.lookupOrd(mid); + int cmp = compare(key, term); + + if (cmp == 0) { + result = mid; + low = mid + 1; + } else if (cmp < 0) { + high = mid - 1; + } else { + low = mid + 1; + } + } + return result; + } + + private static int compare(BytesRef key, BytesRef term) { + BytesRef extractedKey = FlatObjectFieldParser.extractKey(term); + return key.compareTo(extractedKey); + } + + private static class KeyedJsonDocValues extends AbstractSortedSetDocValues { + + private final BytesRef key; + private final SortedSetDocValues delegate; + + /** + * The first and last ordinals whose term has 'key' as a prefix. These + * values must be non-negative (there is at least one matching term). + */ + private final long minOrd; + private final long maxOrd; + + /** + * We cache the first ordinal in a document to avoid unnecessary iterations + * through the delegate doc values. If no ordinal is cached for the current + * document, this value will be -1. 
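+         *
+         * {@link #advanceExact} stores the first matching ordinal here, so the
+         * first call to {@link #nextOrd} does not need to scan past ordinals
+         * belonging to other keys a second time.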
+ */ + private long cachedNextOrd; + + private KeyedJsonDocValues(BytesRef key, + SortedSetDocValues delegate, + long minOrd, + long maxOrd) { + assert minOrd >= 0 && maxOrd >= 0; + this.key = key; + this.delegate = delegate; + this.minOrd = minOrd; + this.maxOrd = maxOrd; + this.cachedNextOrd = -1; + } + + @Override + public long getValueCount() { + return maxOrd - minOrd + 1; + } + + /** + * Returns the (un-prefixed) term value for the requested ordinal. + * + * Note that this method can only be called on ordinals returned from {@link #nextOrd()}. + */ + @Override + public BytesRef lookupOrd(long ord) throws IOException { + long delegateOrd = unmapOrd(ord); + BytesRef keyedValue = delegate.lookupOrd(delegateOrd); + + int prefixLength = key.length + 1; + int valueLength = keyedValue.length - prefixLength; + return new BytesRef(keyedValue.bytes, prefixLength, valueLength); + } + + @Override + public long nextOrd() throws IOException { + if (cachedNextOrd >= 0) { + long nextOrd = cachedNextOrd; + cachedNextOrd = -1; + return mapOrd(nextOrd); + } + + long ord = delegate.nextOrd(); + if (ord != NO_MORE_ORDS && ord <= maxOrd) { + assert ord >= minOrd; + return mapOrd(ord); + } else { + return NO_MORE_ORDS; + } + } + + @Override + public boolean advanceExact(int target) throws IOException { + if (delegate.advanceExact(target)) { + while (true) { + long ord = delegate.nextOrd(); + if (ord == NO_MORE_ORDS || ord > maxOrd) { + break; + } + + if (ord >= minOrd) { + cachedNextOrd = ord; + return true; + } + } + } + + cachedNextOrd = -1; + return false; + } + + /** + * Maps an ordinal from the delegate doc values into the filtered ordinal space. The + * ordinal is shifted to lie in the range [0, (maxOrd - minOrd)]. + */ + private long mapOrd(long ord) { + assert minOrd <= ord && ord <= maxOrd; + return ord - minOrd; + } + + /** + * Given a filtered ordinal in the range [0, (maxOrd - minOrd)], maps it into the + * delegate ordinal space. + */ + private long unmapOrd(long ord) { + assert 0 <= ord && ord <= maxOrd - minOrd; + return ord + minOrd; + } + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlatObjectFieldLookupTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlatObjectFieldLookupTests.java new file mode 100644 index 0000000000000..86e79cd556c4f --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlatObjectFieldLookupTests.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldType; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Function; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class FlatObjectFieldLookupTests extends ESTestCase { + + public void testFieldTypeLookup() { + String fieldName = "object1.object2.field"; + FlatObjectFieldMapper mapper = createFlatObjectMapper(fieldName); + + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", singletonList(mapper), emptyList()); + assertEquals(mapper.fieldType(), lookup.get(fieldName)); + + String objectKey = "key1.key2"; + String searchFieldName = fieldName + "." + objectKey; + + MappedFieldType searchFieldType = lookup.get(searchFieldName); + assertEquals(mapper.keyedFieldName(), searchFieldType.name()); + assertThat(searchFieldType, instanceOf(KeyedFlatObjectFieldType.class)); + + FlatObjectFieldMapper.KeyedFlatObjectFieldType keyedFieldType = (KeyedFlatObjectFieldType) searchFieldType; + assertEquals(objectKey, keyedFieldType.key()); + } + + public void testFieldTypeLookupWithAlias() { + String fieldName = "object1.object2.field"; + FlatObjectFieldMapper mapper = createFlatObjectMapper(fieldName); + + String aliasName = "alias"; + FieldAliasMapper alias = new FieldAliasMapper(aliasName, aliasName, fieldName); + + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", singletonList(mapper), singletonList(alias)); + assertEquals(mapper.fieldType(), lookup.get(aliasName)); + + String objectKey = "key1.key2"; + String searchFieldName = aliasName + "." 
+ objectKey; + + MappedFieldType searchFieldType = lookup.get(searchFieldName); + assertEquals(mapper.keyedFieldName(), searchFieldType.name()); + assertThat(searchFieldType, instanceOf(KeyedFlatObjectFieldType.class)); + + KeyedFlatObjectFieldType keyedFieldType = (KeyedFlatObjectFieldType) searchFieldType; + assertEquals(objectKey, keyedFieldType.key()); + } + + public void testFieldTypeLookupWithMultipleFields() { + String field1 = "object1.object2.field"; + String field2 = "object1.field"; + String field3 = "object2.field"; + + FlatObjectFieldMapper mapper1 = createFlatObjectMapper(field1); + FlatObjectFieldMapper mapper2 = createFlatObjectMapper(field2); + FlatObjectFieldMapper mapper3 = createFlatObjectMapper(field3); + + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", Arrays.asList(mapper1, mapper2), emptyList()); + assertNotNull(lookup.get(field1 + ".some.key")); + assertNotNull(lookup.get(field2 + ".some.key")); + + lookup = lookup.copyAndAddAll("type", singletonList(mapper3), emptyList()); + assertNotNull(lookup.get(field1 + ".some.key")); + assertNotNull(lookup.get(field2 + ".some.key")); + assertNotNull(lookup.get(field3 + ".some.key")); + } + + public void testMaxDynamicKeyDepth() { + FieldTypeLookup lookup = new FieldTypeLookup(); + assertEquals(0, lookup.maxKeyedLookupDepth()); + + // Add a flattened object field. + String flatObjectName = "object1.object2.field"; + FlatObjectFieldMapper flatObjectField = createFlatObjectMapper(flatObjectName); + lookup = lookup.copyAndAddAll("type", singletonList(flatObjectField), emptyList()); + assertEquals(3, lookup.maxKeyedLookupDepth()); + + // Add a short alias to that field. + String aliasName = "alias"; + FieldAliasMapper alias = new FieldAliasMapper(aliasName, aliasName, flatObjectName); + lookup = lookup.copyAndAddAll("type", emptyList(), singletonList(alias)); + assertEquals(3, lookup.maxKeyedLookupDepth()); + + // Add a longer alias to that field. + String longAliasName = "object1.object2.object3.alias"; + FieldAliasMapper longAlias = new FieldAliasMapper(longAliasName, longAliasName, flatObjectName); + lookup = lookup.copyAndAddAll("type", emptyList(), singletonList(longAlias)); + assertEquals(4, lookup.maxKeyedLookupDepth()); + + // Update the long alias to refer to a non-flattened object field. 
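+        // The maximum lookup depth should fall back to the depth of the flattened field's own path.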
+        String fieldName = "field";
+        MockFieldMapper field = new MockFieldMapper(fieldName);
+        longAlias = new FieldAliasMapper(longAliasName, longAliasName, fieldName);
+        lookup = lookup.copyAndAddAll("type", singletonList(field), singletonList(longAlias));
+        assertEquals(3, lookup.maxKeyedLookupDepth());
+    }
+
+    public void testFieldLookupIterator() {
+        MockFieldMapper mapper = new MockFieldMapper("foo");
+        FlatObjectFieldMapper flatObjectMapper = createFlatObjectMapper("object1.object2.field");
+
+        FieldTypeLookup lookup = new FieldTypeLookup()
+            .copyAndAddAll("type", Arrays.asList(mapper, flatObjectMapper), emptyList());
+
+        Set<String> fieldNames = new HashSet<>();
+        for (MappedFieldType fieldType : lookup) {
+            fieldNames.add(fieldType.name());
+        }
+
+        assertThat(fieldNames, containsInAnyOrder(
+            mapper.name(), flatObjectMapper.name(), flatObjectMapper.keyedFieldName()));
+    }
+
+    private FlatObjectFieldMapper createFlatObjectMapper(String fieldName) {
+        Settings settings = Settings.builder()
+            .put("index.version.created", Version.CURRENT)
+            .build();
+        Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
+        return new FlatObjectFieldMapper.Builder(fieldName).build(context);
+    }
+
+    public void testScriptDocValuesLookup() {
+        MapperService mapperService = mock(MapperService.class);
+
+        ScriptDocValues<?> docValues1 = mock(ScriptDocValues.class);
+        IndexFieldData<?> fieldData1 = createFieldData(docValues1);
+
+        ScriptDocValues<?> docValues2 = mock(ScriptDocValues.class);
+        IndexFieldData<?> fieldData2 = createFieldData(docValues2);
+
+        KeyedFlatObjectFieldType fieldType1 = new KeyedFlatObjectFieldType("key1");
+        when(mapperService.fullName("json.key1")).thenReturn(fieldType1);
+
+        KeyedFlatObjectFieldType fieldType2 = new KeyedFlatObjectFieldType("key2");
+        when(mapperService.fullName("json.key2")).thenReturn(fieldType2);
+
+        Function<MappedFieldType, IndexFieldData<?>> fieldDataSupplier = fieldType -> {
+            KeyedFlatObjectFieldType keyedFieldType = (KeyedFlatObjectFieldType) fieldType;
+            return keyedFieldType.key().equals("key1") ? fieldData1 : fieldData2;
+        };
+
+        SearchLookup searchLookup = new SearchLookup(mapperService, fieldDataSupplier);
+        LeafDocLookup docLookup = searchLookup.doc().getLeafDocLookup(null);
+
+        assertEquals(docValues1, docLookup.get("json.key1"));
+        assertEquals(docValues2, docLookup.get("json.key2"));
+    }
+
+    private IndexFieldData<?> createFieldData(ScriptDocValues<?> scriptDocValues) {
+        AtomicFieldData atomicFieldData = mock(AtomicFieldData.class);
+        doReturn(scriptDocValues).when(atomicFieldData).getScriptValues();
+
+        IndexFieldData<?> fieldData = mock(IndexFieldData.class);
+        when(fieldData.getFieldName()).thenReturn("field");
+        doReturn(atomicFieldData).when(fieldData).load(anyObject());
+
+        return fieldData;
+    }
+}
diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java
new file mode 100644
index 0000000000000..71cdfa0693835
--- /dev/null
+++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java
@@ -0,0 +1,457 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.flattened.mapper;
+
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.SourceToParse;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.xpack.core.XPackPlugin;
+import org.elasticsearch.xpack.flattened.FlattenedMapperPlugin;
+import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldType;
+import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.RootFlatObjectFieldType;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
+import static org.hamcrest.Matchers.equalTo;
+
+public class FlatObjectFieldMapperTests extends ESSingleNodeTestCase {
+    private IndexService indexService;
+    private DocumentMapperParser parser;
+
+    @Before
+    public void setup() {
+        indexService = createIndex("test");
+        parser = indexService.mapperService().documentMapperParser();
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(FlattenedMapperPlugin.class, XPackPlugin.class);
+    }
+
+    public void testDefaults() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
+            .startObject("type")
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "flattened")
+                    .endObject()
+                .endObject()
+            .endObject()
+        .endObject());
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+        assertEquals(mapping, mapper.mappingSource().toString());
+
+        BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
+            .startObject("field")
+                .field("key", "value")
+            .endObject()
+        .endObject());
+
+        ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON));
+
+        // Check the root fields.
+        IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
+        assertEquals(2, fields.length);
+
+        assertEquals("field", fields[0].name());
+        assertEquals(new BytesRef("value"), fields[0].binaryValue());
+        assertFalse(fields[0].fieldType().stored());
+        assertTrue(fields[0].fieldType().omitNorms());
+        assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
+
+        assertEquals("field", fields[1].name());
+        assertEquals(new BytesRef("value"), fields[1].binaryValue());
+        assertEquals(DocValuesType.SORTED_SET, fields[1].fieldType().docValuesType());
+
+        // Check the keyed fields.
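+        // Each leaf value is indexed into 'field._keyed' using the form 'key\0value'.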
+ IndexableField[] keyedFields = parsedDoc.rootDoc().getFields("field._keyed"); + assertEquals(2, keyedFields.length); + + assertEquals("field._keyed", keyedFields[0].name()); + assertEquals(new BytesRef("key\0value"), keyedFields[0].binaryValue()); + assertFalse(keyedFields[0].fieldType().stored()); + assertTrue(keyedFields[0].fieldType().omitNorms()); + assertEquals(DocValuesType.NONE, keyedFields[0].fieldType().docValuesType()); + + assertEquals("field._keyed", keyedFields[1].name()); + assertEquals(new BytesRef("key\0value"), keyedFields[1].binaryValue()); + assertEquals(DocValuesType.SORTED_SET, keyedFields[1].fieldType().docValuesType()); + + // Check that there is no 'field names' field. + IndexableField[] fieldNamesFields = parsedDoc.rootDoc().getFields(FieldNamesFieldMapper.NAME); + assertEquals(0, fieldNamesFields.length); + } + + public void testDisableIndex() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("index", false) + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startObject("field") + .field("key", "value") + .endObject() + .endObject()); + + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + assertEquals(DocValuesType.SORTED_SET, fields[0].fieldType().docValuesType()); + + IndexableField[] keyedFields = parsedDoc.rootDoc().getFields("field._keyed"); + assertEquals(1, keyedFields.length); + assertEquals(DocValuesType.SORTED_SET, keyedFields[0].fieldType().docValuesType()); + } + + public void testDisableDocValues() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("doc_values", false) + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startObject("field") + .field("key", "value") + .endObject() + .endObject()); + + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); + + IndexableField[] keyedFields = parsedDoc.rootDoc().getFields("field._keyed"); + assertEquals(1, keyedFields.length); + assertEquals(DocValuesType.NONE, keyedFields[0].fieldType().docValuesType()); + + IndexableField[] fieldNamesFields = parsedDoc.rootDoc().getFields(FieldNamesFieldMapper.NAME); + assertEquals(1, fieldNamesFields.length); + assertEquals("field", fieldNamesFields[0].stringValue()); + } + + public void testEnableStore() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + 
.field("type", "flattened") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject()); + + expectThrows(UnsupportedOperationException.class, () -> + parser.parse("type", new CompressedXContent(mapping))); + } + + public void testIndexOptions() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("index_options", "freqs") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + for (String indexOptions : Arrays.asList("positions", "offsets")) { + String invalidMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("index_options", indexOptions) + .endObject() + .endObject() + .endObject() + .endObject()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(invalidMapping))); + assertEquals("The [flattened] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); + } + } + + public void testNullField() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .nullField("field") + .endObject()); + + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testMalformedJson() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc1 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .field("field", "not a JSON object") + .endObject()); + + expectThrows(MapperParsingException.class, () -> mapper.parse( + new SourceToParse("test", "type", "1", doc1, XContentType.JSON))); + + BytesReference doc2 = new BytesArray("{ \"field\": { \"key\": \"value\" "); + expectThrows(MapperParsingException.class, () -> mapper.parse( + new SourceToParse("test", "type", "1", doc2, XContentType.JSON))); + } + + public void testFieldMultiplicity() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + 
BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startArray("field") + .startObject() + .field("key1", "value") + .endObject() + .startObject() + .field("key2", true) + .field("key3", false) + .endObject() + .endArray() + .endObject()); + + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(6, fields.length); + assertEquals(new BytesRef("value"), fields[0].binaryValue()); + assertEquals(new BytesRef("true"), fields[2].binaryValue()); + assertEquals(new BytesRef("false"), fields[4].binaryValue()); + + IndexableField[] keyedFields = parsedDoc.rootDoc().getFields("field._keyed"); + assertEquals(6, keyedFields.length); + assertEquals(new BytesRef("key1\0value"), keyedFields[0].binaryValue()); + assertEquals(new BytesRef("key2\0true"), keyedFields[2].binaryValue()); + assertEquals(new BytesRef("key3\0false"), keyedFields[4].binaryValue()); + } + + public void testDepthLimit() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("depth_limit", 2) + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startObject("field") + .startObject("key1") + .startObject("key2") + .field("key3", "value") + .endObject() + .endObject() + .endObject() + .endObject()); + + expectThrows(MapperParsingException.class, () -> + mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON))); + } + + public void testEagerGlobalOrdinals() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("eager_global_ordinals", true) + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + assertTrue(fieldMapper.fieldType().eagerGlobalOrdinals()); + } + + public void testIgnoreAbove() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("ignore_above", 10) + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startArray("field") + .startObject() + .field("key", "a longer than usual value") + .endObject() + .endArray() + .endObject()); + + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testNullValues() throws Exception { + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .endObject() + .startObject("other_field") + .field("type", "flattened") + .field("null_value", "placeholder") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .startObject("field") + .nullField("key") + .endObject() + .startObject("other_field") + .nullField("key") + .endObject() + .endObject()); + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON)); + + IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + + IndexableField[] otherFields = parsedDoc.rootDoc().getFields("other_field"); + assertEquals(2, otherFields.length); + assertEquals(new BytesRef("placeholder"), otherFields[0].binaryValue()); + assertEquals(new BytesRef("placeholder"), otherFields[1].binaryValue()); + + IndexableField[] prefixedOtherFields = parsedDoc.rootDoc().getFields("other_field._keyed"); + assertEquals(2, prefixedOtherFields.length); + assertEquals(new BytesRef("key\0placeholder"), prefixedOtherFields[0].binaryValue()); + assertEquals(new BytesRef("key\0placeholder"), prefixedOtherFields[1].binaryValue()); + } + + public void testSplitQueriesOnWhitespace() throws IOException { + MapperService mapperService = indexService.mapperService(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "flattened") + .field("split_queries_on_whitespace", true) + .endObject() + .endObject() + .endObject().endObject()); + mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + + RootFlatObjectFieldType rootFieldType = (RootFlatObjectFieldType) mapperService.fullName("field"); + assertThat(rootFieldType.searchAnalyzer().name(), equalTo("whitespace")); + assertTokenStreamContents(rootFieldType.searchAnalyzer().analyzer().tokenStream("", "Hello World"), + new String[] {"Hello", "World"}); + + KeyedFlatObjectFieldType keyedFieldType = (KeyedFlatObjectFieldType) mapperService.fullName("field.key"); + assertThat(keyedFieldType.searchAnalyzer().name(), equalTo("whitespace")); + assertTokenStreamContents(keyedFieldType.searchAnalyzer().analyzer().tokenStream("", "Hello World"), + new String[] {"Hello", "World"}); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParserTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParserTests.java new file mode 100644 index 0000000000000..ff4de0d85cee1 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldParserTests.java @@ -0,0 +1,318 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.flattened.mapper; + +import com.fasterxml.jackson.core.JsonParseException; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MockFieldMapper.FakeFieldType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.XContentTestUtils; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +public class FlatObjectFieldParserTests extends ESTestCase { + private FlatObjectFieldParser parser; + + @Before + public void setUp() throws Exception { + super.setUp(); + parser = new FlatObjectFieldParser("field", "field._keyed", + new FakeFieldType(), + Integer.MAX_VALUE, + Integer.MAX_VALUE); + } + + public void testTextValues() throws Exception { + String input = "{ \"key1\": \"value1\", \"key2\": \"value2\" }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(4, fields.size()); + + IndexableField field1 = fields.get(0); + assertEquals("field", field1.name()); + assertEquals(new BytesRef("value1"), field1.binaryValue()); + + IndexableField keyedField1 = fields.get(1); + assertEquals("field._keyed", keyedField1.name()); + assertEquals(new BytesRef("key1\0value1"), keyedField1.binaryValue()); + + IndexableField field2 = fields.get(2); + assertEquals("field", field2.name()); + assertEquals(new BytesRef("value2"), field2.binaryValue()); + + IndexableField keyedField2 = fields.get(3); + assertEquals("field._keyed", keyedField2.name()); + assertEquals(new BytesRef("key2\0value2"), keyedField2.binaryValue()); + } + + public void testNumericValues() throws Exception { + String input = "{ \"key\": 2.718 }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(2, fields.size()); + + IndexableField field = fields.get(0); + assertEquals("field", field.name()); + assertEquals(new BytesRef("2.718"), field.binaryValue()); + + IndexableField keyedField = fields.get(1); + assertEquals("field._keyed", keyedField.name()); + assertEquals(new BytesRef("key" + '\0' + "2.718"), keyedField.binaryValue()); + } + + public void testBooleanValues() throws Exception { + String input = "{ \"key\": false }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(2, fields.size()); + + IndexableField field = fields.get(0); + assertEquals("field", field.name()); + assertEquals(new BytesRef("false"), field.binaryValue()); + + IndexableField keyedField = fields.get(1); + assertEquals("field._keyed", keyedField.name()); + assertEquals(new BytesRef("key\0false"), keyedField.binaryValue()); + } + + public void testBasicArrays() throws Exception { + String input = "{ \"key\": [true, false] }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(4, fields.size()); + + IndexableField field1 = fields.get(0); + assertEquals("field", field1.name()); + assertEquals(new BytesRef("true"), field1.binaryValue()); + + IndexableField keyedField1 = fields.get(1); + 
assertEquals("field._keyed", keyedField1.name()); + assertEquals(new BytesRef("key\0true"), keyedField1.binaryValue()); + + IndexableField field2 = fields.get(2); + assertEquals("field", field2.name()); + assertEquals(new BytesRef("false"), field2.binaryValue()); + + IndexableField keyedField2 = fields.get(3); + assertEquals("field._keyed", keyedField2.name()); + assertEquals(new BytesRef("key\0false"), keyedField2.binaryValue()); + } + + public void testArrayOfArrays() throws Exception { + String input = "{ \"key\": [[true, \"value\"], 3] }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(6, fields.size()); + + IndexableField field1 = fields.get(0); + assertEquals("field", field1.name()); + assertEquals(new BytesRef("true"), field1.binaryValue()); + + IndexableField keyedField1 = fields.get(1); + assertEquals("field._keyed", keyedField1.name()); + assertEquals(new BytesRef("key\0true"), keyedField1.binaryValue()); + + IndexableField field2 = fields.get(2); + assertEquals("field", field2.name()); + assertEquals(new BytesRef("value"), field2.binaryValue()); + + IndexableField keyedField2 = fields.get(3); + assertEquals("field._keyed", keyedField2.name()); + assertEquals(new BytesRef("key\0value"), keyedField2.binaryValue()); + + IndexableField field3 = fields.get(4); + assertEquals("field", field3.name()); + assertEquals(new BytesRef("3"), field3.binaryValue()); + + IndexableField keyedField3 = fields.get(5); + assertEquals("field._keyed", keyedField3.name()); + assertEquals(new BytesRef("key" + "\0" + "3"), keyedField3.binaryValue()); + } + + public void testArraysOfObjects() throws Exception { + String input = "{ \"key1\": [{ \"key2\": true }, false], \"key4\": \"other\" }"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(6, fields.size()); + + IndexableField field1 = fields.get(0); + assertEquals("field", field1.name()); + assertEquals(new BytesRef("true"), field1.binaryValue()); + + IndexableField keyedField1 = fields.get(1); + assertEquals("field._keyed", keyedField1.name()); + assertEquals(new BytesRef("key1.key2\0true"), keyedField1.binaryValue()); + + IndexableField field2 = fields.get(2); + assertEquals("field", field2.name()); + assertEquals(new BytesRef("false"), field2.binaryValue()); + + IndexableField keyedField2 = fields.get(3); + assertEquals("field._keyed", keyedField2.name()); + assertEquals(new BytesRef("key1\0false"), keyedField2.binaryValue()); + + IndexableField field3 = fields.get(4); + assertEquals("field", field3.name()); + assertEquals(new BytesRef("other"), field3.binaryValue()); + + IndexableField keyedField3 = fields.get(5); + assertEquals("field._keyed", keyedField3.name()); + assertEquals(new BytesRef("key4\0other"), keyedField3.binaryValue()); + } + + public void testNestedObjects() throws Exception { + String input = "{ \"parent1\": { \"key\" : \"value\" }," + + "\"parent2\": { \"key\" : \"value\" }}"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(4, fields.size()); + + IndexableField field1 = fields.get(0); + assertEquals("field", field1.name()); + assertEquals(new BytesRef("value"), field1.binaryValue()); + + IndexableField keyedField1 = fields.get(1); + assertEquals("field._keyed", keyedField1.name()); + assertEquals(new BytesRef("parent1.key\0value"), keyedField1.binaryValue()); + + IndexableField field2 = fields.get(2); 
+ assertEquals("field", field2.name()); + assertEquals(new BytesRef("value"), field2.binaryValue()); + + IndexableField keyedField2 = fields.get(3); + assertEquals("field._keyed", keyedField2.name()); + assertEquals(new BytesRef("parent2.key\0value"), keyedField2.binaryValue()); + } + + public void testDepthLimit() throws Exception { + String input = "{ \"parent1\": { \"key\" : \"value\" }," + + "\"parent2\": [{ \"key\" : { \"key\" : \"value\" }}]}"; + XContentParser xContentParser = createXContentParser(input); + FlatObjectFieldParser configuredParser = new FlatObjectFieldParser("field", "field._keyed", + new FakeFieldType(), 2, Integer.MAX_VALUE); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> configuredParser.parse(xContentParser)); + assertEquals("The provided [flattened] field [field] exceeds the maximum depth limit of [2].", e.getMessage()); + } + + public void testDepthLimitBoundary() throws Exception { + String input = "{ \"parent1\": { \"key\" : \"value\" }," + + "\"parent2\": [{ \"key\" : { \"key\" : \"value\" }}]}"; + XContentParser xContentParser = createXContentParser(input); + FlatObjectFieldParser configuredParser = new FlatObjectFieldParser("field", "field._keyed", + new FakeFieldType(), 3, Integer.MAX_VALUE); + + List fields = configuredParser.parse(xContentParser); + assertEquals(4, fields.size()); + } + + public void testIgnoreAbove() throws Exception { + String input = "{ \"key\": \"a longer field than usual\" }"; + XContentParser xContentParser = createXContentParser(input); + FlatObjectFieldParser configuredParser = new FlatObjectFieldParser("field", "field._keyed", + new FakeFieldType(), Integer.MAX_VALUE, 10); + + List fields = configuredParser.parse(xContentParser); + assertEquals(0, fields.size()); + } + + public void testNullValues() throws Exception { + String input = "{ \"key\": null}"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(0, fields.size()); + + xContentParser = createXContentParser(input); + + MappedFieldType fieldType = new FakeFieldType(); + fieldType.setNullValue("placeholder"); + FlatObjectFieldParser configuredParser = new FlatObjectFieldParser("field", "field._keyed", + fieldType, Integer.MAX_VALUE, Integer.MAX_VALUE); + + fields = configuredParser.parse(xContentParser); + assertEquals(2, fields.size()); + + IndexableField field = fields.get(0); + assertEquals("field", field.name()); + assertEquals(new BytesRef("placeholder"), field.binaryValue()); + + IndexableField keyedField = fields.get(1); + assertEquals("field._keyed", keyedField.name()); + assertEquals(new BytesRef("key\0placeholder"), keyedField.binaryValue()); + } + + public void testMalformedJson() throws Exception { + String input = "{ \"key\": [true, false }"; + XContentParser xContentParser = createXContentParser(input); + + expectThrows(JsonParseException.class, () -> parser.parse(xContentParser)); + } + + public void testEmptyObject() throws Exception { + String input = "{}"; + XContentParser xContentParser = createXContentParser(input); + + List fields = parser.parse(xContentParser); + assertEquals(0, fields.size()); + } + + public void testRandomFields() throws Exception { + BytesReference input = BytesReference.bytes( + XContentBuilder.builder(JsonXContent.jsonXContent) + .startObject() + .startObject("object") + .field("key", "value") + .endObject() + .startArray("array") + .value(2.718) + .endArray() + .endObject()); + + input = 
XContentTestUtils.insertRandomFields(XContentType.JSON, input, null, random()); + XContentParser xContentParser = createXContentParser(input.utf8ToString()); + + List fields = parser.parse(xContentParser); + assertTrue(fields.size() > 4); + } + + public void testReservedCharacters() throws Exception { + BytesReference input = BytesReference.bytes( + XContentBuilder.builder(JsonXContent.jsonXContent) + .startObject() + .field("k\0y", "value") + .endObject()); + XContentParser xContentParser = createXContentParser(input.utf8ToString()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse(xContentParser)); + assertEquals("Keys in [flattened] fields cannot contain the reserved character \\0. Offending key: [k\0y].", + e.getMessage()); + } + + private XContentParser createXContentParser(String input) throws IOException { + XContentParser xContentParser = createParser(JsonXContent.jsonXContent, input); + xContentParser.nextToken(); + return xContentParser; + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectIndexFieldDataTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectIndexFieldDataTests.java new file mode 100644 index 0000000000000..97ca7d4d88ad3 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectIndexFieldDataTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.apache.lucene.analysis.core.KeywordAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.flattened.FlattenedMapperPlugin; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldData; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldType; + +import java.io.IOException; +import java.util.Collection; +import java.util.concurrent.atomic.AtomicInteger; + +public class FlatObjectIndexFieldDataTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(FlattenedMapperPlugin.class, XPackPlugin.class); + } + + public void testGlobalFieldDataCaching() throws 
IOException { + // Set up the index service. + IndexService indexService = createIndex("test"); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexFieldDataService ifdService = new IndexFieldDataService(indexService.getIndexSettings(), + indicesService.getIndicesFieldDataCache(), + indicesService.getCircuitBreakerService(), + indexService.mapperService()); + + Mapper.BuilderContext ctx = new Mapper.BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); + FlatObjectFieldMapper fieldMapper = new FlatObjectFieldMapper.Builder("json").build(ctx); + + AtomicInteger onCacheCalled = new AtomicInteger(); + ifdService.setListener(new IndexFieldDataCache.Listener() { + @Override + public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) { + assertEquals(fieldMapper.keyedFieldName(), fieldName); + onCacheCalled.incrementAndGet(); + } + }); + + // Add some documents. + Directory directory = LuceneTestCase.newDirectory(); + IndexWriterConfig config = new IndexWriterConfig(new KeywordAnalyzer()); + IndexWriter writer = new IndexWriter(directory, config); + + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("json._keyed", new BytesRef("some_key\0some_value"))); + writer.addDocument(doc); + writer.commit(); + writer.addDocument(doc); + DirectoryReader reader = ElasticsearchDirectoryReader.wrap( + DirectoryReader.open(writer), + new ShardId("test", "_na_", 1)); + + // Load global field data for subfield 'key'. + KeyedFlatObjectFieldType fieldType1 = fieldMapper.keyedFieldType("key"); + IndexFieldData ifd1 = ifdService.getForField(fieldType1); + assertTrue(ifd1 instanceof KeyedFlatObjectFieldData); + + KeyedFlatObjectFieldData fieldData1 = (KeyedFlatObjectFieldData) ifd1; + assertEquals("key", fieldData1.getKey()); + fieldData1.loadGlobal(reader); + assertEquals(1, onCacheCalled.get()); + + // Load global field data for the subfield 'other_key'. + KeyedFlatObjectFieldType fieldType2 = fieldMapper.keyedFieldType("other_key"); + IndexFieldData ifd2 = ifdService.getForField(fieldType2); + assertTrue(ifd2 instanceof KeyedFlatObjectFieldData); + + KeyedFlatObjectFieldData fieldData2 = (KeyedFlatObjectFieldData) ifd2; + assertEquals("other_key", fieldData2.getKey()); + fieldData2.loadGlobal(reader); + assertEquals(1, onCacheCalled.get()); + + ifdService.clear(); + reader.close(); + writer.close(); + directory.close(); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectSearchTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectSearchTests.java new file mode 100644 index 0000000000000..373d1d16d8407 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectSearchTests.java @@ -0,0 +1,512 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; +import org.elasticsearch.search.aggregations.metrics.Cardinality; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.flattened.FlattenedMapperPlugin; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.cardinality; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.notNullValue; + +public class FlatObjectSearchTests extends ESSingleNodeTestCase { + + protected Collection> getPlugins() { + return pluginList(FlattenedMapperPlugin.class, XPackPlugin.class); + } + + @Before + public void setUpIndex() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("flat_object") + .field("type", "flattened") + .field("split_queries_on_whitespace", true) + .endObject() + .startObject("headers") + .field("type", "flattened") + .field("split_queries_on_whitespace", true) + .endObject() + .startObject("labels") + .field("type", "flattened") + .endObject() + .endObject() + .endObject() + .endObject(); + createIndex("test", Settings.EMPTY, "_doc", mapping); + } + + public void testMatchQuery() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("headers") + .field("content-type", "application/json") 
+ .field("origin", "https://www.elastic.co") + .endObject() + .endObject()) + .get(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchQuery("headers", "application/json")) + .get(); + assertHitCount(searchResponse, 1L); + + // Check that queries are split on whitespace. + searchResponse = client().prepareSearch() + .setQuery(matchQuery("headers.content-type", "application/json text/plain")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch() + .setQuery(matchQuery("headers.origin", "application/json")) + .get(); + assertHitCount(searchResponse, 0L); + } + + public void testMultiMatchQuery() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("headers") + .field("content-type", "application/json") + .field("origin", "https://www.elastic.co") + .endObject() + .endObject()) + .get(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(multiMatchQuery("application/json", "headers")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch() + .setQuery(multiMatchQuery("application/json text/plain", "headers.content-type")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch() + .setQuery(multiMatchQuery("application/json", "headers.origin")) + .get(); + assertHitCount(searchResponse, 0L); + + searchResponse = client().prepareSearch() + .setQuery(multiMatchQuery("application/json", "headers.origin", "headers.contentType")) + .get(); + assertHitCount(searchResponse, 0L); + } + + public void testQueryStringQuery() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("field1", "value") + .field("field2", "2.718") + .endObject() + .endObject()) + .get(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("flat_object.field1:value")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + + response = client().prepareSearch("test") + .setQuery(queryStringQuery("flat_object.field1:value AND flat_object:2.718")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + + response = client().prepareSearch("test") + .setQuery(queryStringQuery("2.718").field("flat_object.field2")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + } + + public void testSimpleQueryStringQuery() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("field1", "value") + .field("field2", "2.718") + .endObject() + .endObject()) + .get(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("value").field("flat_object.field1")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + + response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("+value +2.718").field("flat_object")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + + response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("+value +3.141").field("flat_object")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 0); 
+ } + + public void testExists() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("headers") + .field("content-type", "application/json") + .endObject() + .endObject()) + .get(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(existsQuery("headers")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch() + .setQuery(existsQuery("headers.content-type")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch() + .setQuery(existsQuery("headers.nonexistent")) + .get(); + assertHitCount(searchResponse, 0L); + } + + public void testCardinalityAggregation() throws IOException { + int numDocs = randomIntBetween(2, 100); + int precisionThreshold = randomIntBetween(0, 1 << randomInt(20)); + + BulkRequestBuilder bulkRequest = client().prepareBulk("test", "_doc") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE); + + // Add a random number of documents containing a flat object field, plus + // a small number of dummy documents. + for (int i = 0; i < numDocs; ++i) { + bulkRequest.add(client().prepareIndex() + .setSource(XContentFactory.jsonBuilder().startObject() + .startObject("flat_object") + .field("first", i) + .field("second", i / 2) + .endObject() + .endObject())); + } + + for (int i = 0; i < 10; i++) { + bulkRequest.add(client().prepareIndex("test", "_doc") + .setSource("other_field", "1")); + } + + BulkResponse bulkResponse = bulkRequest.get(); + assertNoFailures(bulkResponse); + + // Test the root flat object field. + SearchResponse response = client().prepareSearch("test") + .addAggregation(cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field("flat_object")) + .get(); + + assertSearchResponse(response); + Cardinality count = response.getAggregations().get("cardinality"); + assertCardinality(count, numDocs, precisionThreshold); + + // Test two keyed flat object fields. 
+ SearchResponse firstResponse = client().prepareSearch("test") + .addAggregation(cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field("flat_object.first")) + .get(); + assertSearchResponse(firstResponse); + + Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); + assertCardinality(firstCount, numDocs, precisionThreshold); + + SearchResponse secondResponse = client().prepareSearch("test") + .addAggregation(cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field("flat_object.second")) + .get(); + assertSearchResponse(secondResponse); + + Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); + assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + } + + private void assertCardinality(Cardinality count, long value, int precisionThreshold) { + if (value <= precisionThreshold) { + // linear counting should be picked, and should be accurate + assertEquals(value, count.getValue()); + } else { + // error is not bound, so let's just make sure it is > 0 + assertThat(count.getValue(), greaterThan(0L)); + } + } + + public void testTermsAggregation() throws IOException { + BulkRequestBuilder bulkRequest = client().prepareBulk("test", "_doc") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE); + for (int i = 0; i < 5; i++) { + bulkRequest.add(client().prepareIndex() + .setSource(XContentFactory.jsonBuilder().startObject() + .startObject("labels") + .field("priority", "urgent") + .field("release", "v1.2." + i) + .endObject() + .endObject())); + } + + BulkResponse bulkResponse = bulkRequest.get(); + assertNoFailures(bulkResponse); + + // Aggregate on the root 'labels' field. + TermsAggregationBuilder builder = createTermsAgg("labels"); + SearchResponse response = client().prepareSearch("test") + .addAggregation(builder) + .get(); + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(6)); + + Terms.Bucket bucket1 = terms.getBuckets().get(0); + assertEquals("urgent", bucket1.getKey()); + assertEquals(5, bucket1.getDocCount()); + + Terms.Bucket bucket2 = terms.getBuckets().get(1); + assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket2.getDocCount()); + + // Aggregate on the 'priority' subfield. + TermsAggregationBuilder priorityAgg = createTermsAgg("labels.priority"); + SearchResponse priorityResponse = client().prepareSearch("test") + .addAggregation(priorityAgg) + .get(); + assertSearchResponse(priorityResponse); + + Terms priorityTerms = priorityResponse.getAggregations().get("terms"); + assertThat(priorityTerms, notNullValue()); + assertThat(priorityTerms.getName(), equalTo("terms")); + assertThat(priorityTerms.getBuckets().size(), equalTo(1)); + + Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); + assertEquals("urgent", priorityBucket.getKey()); + assertEquals(5, priorityBucket.getDocCount()); + + // Aggregate on the 'release' subfield. 
+ TermsAggregationBuilder releaseAgg = createTermsAgg("labels.release"); + SearchResponse releaseResponse = client().prepareSearch("test") + .addAggregation(releaseAgg) + .get(); + assertSearchResponse(releaseResponse); + + Terms releaseTerms = releaseResponse.getAggregations().get("terms"); + assertThat(releaseTerms, notNullValue()); + assertThat(releaseTerms.getName(), equalTo("terms")); + assertThat(releaseTerms.getBuckets().size(), equalTo(5)); + + for (Terms.Bucket bucket : releaseTerms.getBuckets()) { + assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket.getDocCount()); + } + + // Aggregate on the 'priority' subfield with a min_doc_count of 0. + TermsAggregationBuilder minDocCountAgg = createTermsAgg("labels.priority") + .minDocCount(0); + SearchResponse minDocCountResponse = client().prepareSearch("test") + .addAggregation(minDocCountAgg) + .get(); + assertSearchResponse(minDocCountResponse); + + Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); + assertThat(minDocCountTerms, notNullValue()); + assertThat(minDocCountTerms.getName(), equalTo("terms")); + assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + } + + private TermsAggregationBuilder createTermsAgg(String field) { + TermsAggregatorFactory.ExecutionMode executionMode = randomFrom( + TermsAggregatorFactory.ExecutionMode.values()); + Aggregator.SubAggCollectionMode collectionMode = randomFrom( + Aggregator.SubAggCollectionMode.values()); + + return terms("terms") + .field(field) + .collectMode(collectionMode) + .executionHint(executionMode.toString()); + } + + + public void testLoadDocValuesFields() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("key", "value") + .field("other_key", "other_value") + .endObject() + .endObject()) + .get(); + + SearchResponse response = client().prepareSearch("test") + .addDocValueField("flat_object") + .addDocValueField("flat_object.key") + .get(); + assertSearchResponse(response); + assertHitCount(response, 1); + + Map fields = response.getHits().getAt(0).getFields(); + + DocumentField field = fields.get("flat_object"); + assertEquals("flat_object", field.getName()); + assertEquals(Arrays.asList("other_value", "value"), field.getValues()); + + DocumentField keyedField = fields.get("flat_object.key"); + assertEquals("flat_object.key", keyedField.getName()); + assertEquals("value", keyedField.getValue()); + } + + public void testFieldSort() throws Exception { + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("key", "A") + .field("other_key", "D") + .endObject() + .endObject()) + .get(); + + client().prepareIndex("test", "_doc", "2") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("key", "B") + .field("other_key", "C") + .endObject() + .endObject()) + .get(); + + client().prepareIndex("test", "_doc", "3") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(XContentFactory.jsonBuilder() + .startObject() + .startObject("flat_object") + .field("other_key", "E") + .endObject() + .endObject()) + .get(); + + SearchResponse response = client().prepareSearch("test") + .addSort("flat_object", SortOrder.DESC) + .get(); + 
assertSearchResponse(response); + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "1", "2"); + + response = client().prepareSearch("test") + .addSort("flat_object.key", SortOrder.DESC) + .get(); + assertSearchResponse(response); + assertHitCount(response, 3); + assertOrderedSearchHits(response, "2", "1", "3"); + + response = client().prepareSearch("test") + .addSort(new FieldSortBuilder("flat_object.key").order(SortOrder.DESC).missing("Z")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "2", "1"); + } + + public void testSourceFiltering() { + Map headers = new HashMap<>(); + headers.put("content-type", "application/json"); + headers.put("origin", "https://www.elastic.co"); + Map source = Collections.singletonMap("headers", headers); + + client().prepareIndex("test", "_doc", "1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .setSource(source) + .get(); + + SearchResponse response = client().prepareSearch("test").setFetchSource(true).get(); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); + + // Check 'include' filtering. + response = client().prepareSearch("test").setFetchSource("headers", null).get(); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); + + response = client().prepareSearch("test").setFetchSource("headers.content-type", null).get(); + Map filteredSource = Collections.singletonMap("headers", + Collections.singletonMap("content-type", "application/json")); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + + // Check 'exclude' filtering. + response = client().prepareSearch("test").setFetchSource(null, "headers.content-type").get(); + filteredSource = Collections.singletonMap("headers", + Collections.singletonMap("origin", "https://www.elastic.co")); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldDataTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldDataTests.java new file mode 100644 index 0000000000000..bb6c195395d4e --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectAtomicFieldDataTests.java @@ -0,0 +1,204 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+package org.elasticsearch.xpack.flattened.mapper;
+
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues;
+import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
+import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+
+import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS;
+
+public class KeyedFlatObjectAtomicFieldDataTests extends ESTestCase {
+    private AtomicOrdinalsFieldData delegate;
+
+    @Before
+    public void setUpDelegate() {
+        BytesRef[] allTerms = new BytesRef[60];
+        long[] documentOrds = new long[50];
+        int index = 0;
+
+        for (int ord = 0; ord < allTerms.length; ord++) {
+            String key;
+            if (ord < 20) {
+                key = "apple";
+            } else if (ord < 30) {
+                key = "avocado";
+            } else if (ord < 40) {
+                key = "banana";
+            } else if (ord < 41) {
+                key = "cantaloupe";
+            } else {
+                key = "cucumber";
+            }
+
+            allTerms[ord] = prefixedValue(key, "value" + ord);
+
+            // Do not include the term 'avocado' in the mock document.
+            if (key.equals("avocado") == false) {
+                documentOrds[index++] = ord;
+            }
+        }
+
+        delegate = new MockAtomicOrdinalsFieldData(allTerms, documentOrds);
+    }
+
+    private BytesRef prefixedValue(String key, String value) {
+        String term = FlatObjectFieldParser.createKeyedValue(key, value);
+        return new BytesRef(term);
+    }
+
+    public void testFindOrdinalBounds() throws IOException {
+        testFindOrdinalBounds("apple", delegate, 0, 19);
+        testFindOrdinalBounds("avocado", delegate, 20, 29);
+        testFindOrdinalBounds("banana", delegate, 30, 39);
+        testFindOrdinalBounds("berry", delegate, -1, -1);
+        testFindOrdinalBounds("cantaloupe", delegate, 40, 40);
+        testFindOrdinalBounds("cucumber", delegate, 41, 59);
+
+        AtomicOrdinalsFieldData emptyDelegate = new MockAtomicOrdinalsFieldData(new BytesRef[0], new long[0]);
+        testFindOrdinalBounds("apple", emptyDelegate, -1, -1);
+
+        BytesRef[] terms = new BytesRef[] { prefixedValue("prefix", "value") };
+        AtomicOrdinalsFieldData singleValueDelegate = new MockAtomicOrdinalsFieldData(terms, new long[0]);
+        testFindOrdinalBounds("prefix", singleValueDelegate, 0, 0);
+        testFindOrdinalBounds("prefix1", singleValueDelegate, -1, -1);
+
+        terms = new BytesRef[] { prefixedValue("prefix", "value"),
+            prefixedValue("prefix1", "value"),
+            prefixedValue("prefix1", "value1"),
+            prefixedValue("prefix2", "value"),
+            prefixedValue("prefix3", "value")};
+        AtomicOrdinalsFieldData oddLengthDelegate = new MockAtomicOrdinalsFieldData(terms, new long[0]);
+        testFindOrdinalBounds("prefix", oddLengthDelegate, 0, 0);
+        testFindOrdinalBounds("prefix1", oddLengthDelegate, 1, 2);
+        testFindOrdinalBounds("prefix2", oddLengthDelegate, 3, 3);
+        testFindOrdinalBounds("prefix3", oddLengthDelegate, 4, 4);
+    }
+
+    public void testFindOrdinalBounds(String key,
+                                      AtomicOrdinalsFieldData delegate,
+                                      long expectedMinOrd,
+                                      long expectedMaxOrd) throws IOException {
+        BytesRef bytesKey = new BytesRef(key);
+
+        long actualMinOrd = KeyedFlatObjectAtomicFieldData.findMinOrd(bytesKey, delegate.getOrdinalsValues());
+        assertEquals(expectedMinOrd, actualMinOrd);
+
+        long actualMaxOrd = KeyedFlatObjectAtomicFieldData.findMaxOrd(bytesKey, delegate.getOrdinalsValues());
+        assertEquals(expectedMaxOrd, actualMaxOrd);
+    }
+
+    public void testAdvanceExact() throws IOException {
+        AtomicOrdinalsFieldData avocadoFieldData = new
KeyedFlatObjectAtomicFieldData("avocado", delegate); + assertFalse(avocadoFieldData.getOrdinalsValues().advanceExact(0)); + + AtomicOrdinalsFieldData bananaFieldData = new KeyedFlatObjectAtomicFieldData("banana", delegate); + assertTrue(bananaFieldData.getOrdinalsValues().advanceExact(0)); + + AtomicOrdinalsFieldData nonexistentFieldData = new KeyedFlatObjectAtomicFieldData("berry", delegate); + assertFalse(nonexistentFieldData.getOrdinalsValues().advanceExact(0)); + } + + public void testNextOrd() throws IOException { + AtomicOrdinalsFieldData fieldData = new KeyedFlatObjectAtomicFieldData("banana", delegate); + SortedSetDocValues docValues = fieldData.getOrdinalsValues(); + docValues.advanceExact(0); + + int retrievedOrds = 0; + for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + assertTrue(0 <= ord && ord < 10); + retrievedOrds++; + + BytesRef expectedValue = new BytesRef("value" + (ord + 30)); + BytesRef actualValue = docValues.lookupOrd(ord); + assertEquals(expectedValue, actualValue); + } + + assertEquals(10, retrievedOrds); + } + + public void testLookupOrd() throws IOException { + AtomicOrdinalsFieldData appleFieldData = new KeyedFlatObjectAtomicFieldData("apple", delegate); + SortedSetDocValues appleDocValues = appleFieldData.getOrdinalsValues(); + assertEquals(new BytesRef("value0"), appleDocValues.lookupOrd(0)); + + AtomicOrdinalsFieldData cantaloupeFieldData = new KeyedFlatObjectAtomicFieldData("cantaloupe", delegate); + SortedSetDocValues cantaloupeDocValues = cantaloupeFieldData.getOrdinalsValues(); + assertEquals(new BytesRef("value40"), cantaloupeDocValues.lookupOrd(0)); + + AtomicOrdinalsFieldData cucumberFieldData = new KeyedFlatObjectAtomicFieldData("cucumber", delegate); + SortedSetDocValues cucumberDocValues = cucumberFieldData.getOrdinalsValues(); + assertEquals(new BytesRef("value41"), cucumberDocValues.lookupOrd(0)); + } + + private static class MockAtomicOrdinalsFieldData extends AbstractAtomicOrdinalsFieldData { + private final SortedSetDocValues docValues; + + MockAtomicOrdinalsFieldData(BytesRef[] allTerms, + long[] documentOrds) { + super(AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION); + this.docValues = new MockSortedSetDocValues(allTerms, documentOrds); + } + + @Override + public SortedSetDocValues getOrdinalsValues() { + return docValues; + } + + @Override + public long ramBytesUsed() { + return 0; + } + + @Override + public void close() { + // Nothing to do. 
+ } + } + + private static class MockSortedSetDocValues extends AbstractSortedSetDocValues { + private final BytesRef[] allTerms; + private final long[] documentOrds; + private int index; + + MockSortedSetDocValues(BytesRef[] allTerms, + long[] documentOrds) { + this.allTerms = allTerms; + this.documentOrds = documentOrds; + } + + @Override + public boolean advanceExact(int docID) { + index = 0; + return true; + } + + @Override + public long nextOrd() { + if (index == documentOrds.length) { + return NO_MORE_ORDS; + } + return documentOrds[index++]; + } + + @Override + public BytesRef lookupOrd(long ord) { + return allTerms[(int) ord]; + } + + @Override + public long getValueCount() { + return allTerms.length; + } + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java new file mode 100644 index 0000000000000..46901035c8a96 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/KeyedFlatObjectFieldTypeTests.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermInSetQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldType; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.List; + +public class KeyedFlatObjectFieldTypeTests extends FieldTypeTestCase { + + @Before + public void setupProperties() { + addModifier(new Modifier("split_queries_on_whitespace", true) { + @Override + public void modify(MappedFieldType type) { + KeyedFlatObjectFieldType ft = (KeyedFlatObjectFieldType) type; + ft.setSplitQueriesOnWhitespace(!ft.splitQueriesOnWhitespace()); + } + }); + } + + @Override + protected KeyedFlatObjectFieldType createDefaultFieldType() { + return new KeyedFlatObjectFieldType("key"); + } + + public void testIndexedValueForSearch() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + BytesRef keywordValue = ft.indexedValueForSearch("value"); + assertEquals(new BytesRef("key\0value"), keywordValue); + + BytesRef doubleValue = ft.indexedValueForSearch(2.718); + assertEquals(new BytesRef("key\0" + "2.718"), doubleValue); + + BytesRef booleanValue = ft.indexedValueForSearch(true); + assertEquals(new BytesRef("key\0true"), booleanValue); + } + + public void testTermQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new TermQuery(new Term("field", "key\0value")); + assertEquals(expected, ft.termQuery("value", null)); + + ft.setIndexOptions(IndexOptions.NONE); + 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> ft.termQuery("field", null)); + assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + } + + public void testTermsQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new TermInSetQuery("field", + new BytesRef("key\0value1"), + new BytesRef("key\0value2")); + + List terms = new ArrayList<>(); + terms.add("value1"); + terms.add("value2"); + Query actual = ft.termsQuery(terms, null); + + assertEquals(expected, actual); + } + + public void testExistsQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new PrefixQuery(new Term("field", "key\0")); + assertEquals(expected, ft.existsQuery(null)); + } + + public void testPrefixQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new PrefixQuery(new Term("field", "key\0val")); + assertEquals(expected, ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, null)); + } + + public void testFuzzyQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.fuzzyQuery("valuee", Fuzziness.fromEdits(2), 1, 50, true)); + assertEquals("[fuzzy] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); + } + + public void testRangeQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + TermRangeQuery expected = new TermRangeQuery("field", + new BytesRef("key\0lower"), + new BytesRef("key\0upper"), false, false); + assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, null)); + + expected = new TermRangeQuery("field", + new BytesRef("key\0lower"), + new BytesRef("key\0upper"), true, true); + assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, null)); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + ft.rangeQuery("lower", null, false, false, null)); + assertEquals("[range] queries on keyed [flattened] fields must include both an upper and a lower bound.", + e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> + ft.rangeQuery(null, "upper", false, false, null)); + assertEquals("[range] queries on keyed [flattened] fields must include both an upper and a lower bound.", + e.getMessage()); + } + + public void testRegexpQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.regexpQuery("valu*", 0, 10, null, null)); + assertEquals("[regexp] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); + } + + public void testWildcardQuery() { + KeyedFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, + () -> ft.wildcardQuery("valu*", null, null)); + assertEquals("[wildcard] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); + } +} diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java new file mode 
100644 index 0000000000000..be297663c6e74 --- /dev/null +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/RootFlatObjectFieldTypeTests.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.flattened.mapper; + +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.RootFlatObjectFieldType; +import org.junit.Before; + +public class RootFlatObjectFieldTypeTests extends FieldTypeTestCase { + + @Before + public void setupProperties() { + addModifier(new Modifier("split_queries_on_whitespace", true) { + @Override + public void modify(MappedFieldType type) { + RootFlatObjectFieldType ft = (RootFlatObjectFieldType) type; + ft.setSplitQueriesOnWhitespace(!ft.splitQueriesOnWhitespace()); + } + }); + } + + @Override + protected RootFlatObjectFieldType createDefaultFieldType() { + return new RootFlatObjectFieldType(); + } + + public void testValueForDisplay() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + + String fieldValue = "{ \"key\": \"value\" }"; + BytesRef storedValue = new BytesRef(fieldValue); + assertEquals(fieldValue, ft.valueForDisplay(storedValue)); + } + + public void testTermQuery() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new TermQuery(new Term("field", "value")); + assertEquals(expected, ft.termQuery("value", null)); + + ft.setIndexOptions(IndexOptions.NONE); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> ft.termQuery("field", null)); + assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + } + + public void testExistsQuery() { + RootFlatObjectFieldType ft = new RootFlatObjectFieldType(); + ft.setName("field"); + assertEquals( + new TermQuery(new Term(FieldNamesFieldMapper.NAME, new BytesRef("field"))), + ft.existsQuery(null)); + + ft.setHasDocValues(true); + assertEquals(new DocValuesFieldExistsQuery("field"), ft.existsQuery(null)); + } + + public void testFuzzyQuery() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new FuzzyQuery(new Term("field", "value"), 2, 1, 50, true); + Query actual = ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true); + assertEquals(expected, actual); + } + + public void testRangeQuery() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + TermRangeQuery expected = new TermRangeQuery("field", + new BytesRef("lower"), + new BytesRef("upper"), false, false); + assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, null)); + + expected 
= new TermRangeQuery("field", + new BytesRef("lower"), + new BytesRef("upper"), true, true); + assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, null)); + } + + public void testRegexpQuery() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new RegexpQuery(new Term("field", "val.*")); + Query actual = ft.regexpQuery("val.*", 0, 10, null, null); + assertEquals(expected, actual); + } + + public void testWildcardQuery() { + RootFlatObjectFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + Query expected = new WildcardQuery(new Term("field", new BytesRef("valu*"))); + assertEquals(expected, ft.wildcardQuery("valu*", null, null)); + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml new file mode 100644 index 0000000000000..c750abb7895f0 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/flattened/10_basic.yml @@ -0,0 +1,111 @@ +--- +"Test exists query on flattened object field": + - skip: + version: " - 7.99.99" + reason: "Flat object fields are currently only implemented in 8.0." + + - do: + indices.create: + index: flat_object_test + body: + mappings: + properties: + flat_object: + type: flattened + - do: + index: + index: flat_object_test + id: 1 + body: + flat_object: + key: some_value + refresh: true + + - do: + search: + index: flat_object_test + body: + query: + exists: + field: flat_object + + - match: { hits.total.value: 1 } + + - do: + search: + index: flat_object_test + body: + query: + exists: + field: flat_object.key + + - match: { hits.total.value: 1 } + + - do: + search: + index: flat_object_test + body: + query: + exists: + field: flat_object.nonexistent_key + + - match: { hits.total.value: 0 } + +--- +"Test query string query on flattened object field": + - skip: + version: " - 7.99.99" + reason: "Flat object fields are currently only implemented in 8.0." 
+ + - do: + indices.create: + index: test + body: + mappings: + properties: + headers: + type: flattened + + - do: + index: + index: test + id: 1 + body: + headers: + content-type: application/javascript + origin: elastic.co + refresh: true + + - do: + index: + index: test + id: 2 + body: + headers: + content-type: text/plain + origin: elastic.co + refresh: true + + - do: + search: + index: test + body: + query: + query_string: + query: "headers:text\\/plain" + + - match: { hits.total.value: 1 } + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + search: + index: test + body: + query: + query_string: + query: "application\\/javascript AND headers.origin:elastic.co" + + - match: { hits.total.value: 1 } + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "1" } From 623a5ddac092c3443290580f6b145559e05d64f0 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Fri, 28 Jun 2019 08:35:20 -0400 Subject: [PATCH 057/140] [DOCS] Rewrite boosting query (#43647) --- .../query-dsl/boosting-query.asciidoc | 52 ++++++++++++++----- 1 file changed, 38 insertions(+), 14 deletions(-) diff --git a/docs/reference/query-dsl/boosting-query.asciidoc b/docs/reference/query-dsl/boosting-query.asciidoc index 5cd12ce1f00b7..c57235e71606d 100644 --- a/docs/reference/query-dsl/boosting-query.asciidoc +++ b/docs/reference/query-dsl/boosting-query.asciidoc @@ -1,36 +1,60 @@ [[query-dsl-boosting-query]] === Boosting Query -The `boosting` query can be used to effectively demote results that -match a given query. Unlike the "NOT" clause in bool query, this still -selects documents that contain undesirable terms, but reduces their -overall score. +Returns documents matching a `positive` query while reducing the +<> of documents that also match a +`negative` query. -It accepts a `positive` query and a `negative` query. -Only documents that match the `positive` query will be included -in the results list, but documents that also match the `negative` query -will be downgraded by multiplying the original `_score` of the document -with the `negative_boost`. +You can use the `boosting` query to demote certain documents without +excluding them from the search results. + +[[boosting-query-ex-request]] +==== Example request [source,js] --------------------------------------------------- +---- GET /_search { "query": { "boosting" : { "positive" : { "term" : { - "field1" : "value1" + "text" : "apple" } }, "negative" : { "term" : { - "field2" : "value2" + "text" : "pie tart fruit crumble tree" } }, - "negative_boost" : 0.2 + "negative_boost" : 0.5 } } } --------------------------------------------------- +---- // CONSOLE + +[[boosting-top-level-params]] +==== Top-level parameters for `boosting` + +`positive` (Required):: +Query you wish to run. Any returned documents must match this query. + +`negative` (Required):: ++ +-- +Query used to decrease the <> of matching +documents. + +If a returned document matches the `positive` query and this query, the +`boosting` query calculates the final <> +for the document as follows: + +. Take the original relevance score from the `positive` query. +. Multiply the score by the `negative_boost` value. +-- + +`negative_boost` (Required):: +Floating point number between `0` and `1.0` used to decrease the +<> of documents matching the `negative` +query. 
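+
+As a worked example (the numbers here are purely illustrative), suppose a
+document receives a relevance score of `2.0` from the `positive` query and the
+`negative_boost` value is `0.5`. If the document also matches the `negative`
+query, its final relevance score is `2.0 * 0.5 = 1.0`. If it matches only the
+`positive` query, it keeps its original score of `2.0`.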
\ No newline at end of file

From 89a3eb3c6f8b291927c932885425886e4834a0e4 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Fri, 28 Jun 2019 13:58:06 +0100
Subject: [PATCH 058/140] Wildcard intervals (#43691)

This commit adds a wildcard intervals source, similar to the prefix. It also
renames the `term` parameter in `prefix` to `prefix`, bringing it into line
with the `pattern` parameter in `wildcard`.

Closes #43198
---
 .../query-dsl/intervals-query.asciidoc        |  28 ++++
 .../test/search/230_interval_query.yml        |  20 +++
 .../index/query/IntervalsSourceProvider.java  | 143 ++++++++++++++++--
 .../elasticsearch/search/SearchModule.java    |   2 +
 .../query/IntervalQueryBuilderTests.java      |  80 +++++++++-
 5 files changed, 258 insertions(+), 15 deletions(-)

diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc
index 951147a21ac50..7353ca137f3e1 100644
--- a/docs/reference/query-dsl/intervals-query.asciidoc
+++ b/docs/reference/query-dsl/intervals-query.asciidoc
@@ -101,6 +101,34 @@ If specified, then match intervals from this field rather than the top-level field.
 The `prefix` will be normalized using the search analyzer from this field, unless
 `analyzer` is specified separately.
 
+[[intervals-wildcard]]
+==== `wildcard`
+
+The `wildcard` rule finds terms that match a wildcard pattern. The pattern will
+expand to match at most 128 terms; if there are more matching terms in the index,
+then an error will be returned.
+
+[horizontal]
+`pattern`::
+Find terms matching this pattern.
++
+--
+This parameter supports two wildcard operators:
+
+* `?`, which matches any single character
+* `*`, which can match zero or more characters, including an empty one
+
+WARNING: Avoid beginning patterns with `*` or `?`. This can increase
+the iterations needed to find matching terms and slow search performance.
+--
+`analyzer`::
+Which analyzer should be used to normalize the `pattern`. By default, the
+search analyzer of the top-level field will be used.
+`use_field`::
+If specified, then match intervals from this field rather than the top-level field.
+The `pattern` will be normalized using the search analyzer from this field, unless
+`analyzer` is specified separately.
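+
+For example, the following search is a minimal illustration of the `wildcard`
+rule, mirroring the REST test added in this change (the field name `my_text`
+is a placeholder). It matches documents containing a term such as `outside`:
+
+[source,js]
+----
+POST _search
+{
+    "query": {
+        "intervals" : {
+            "my_text" : {
+                "wildcard" : {
+                    "pattern" : "out?ide"
+                }
+            }
+        }
+    }
+}
+----
+// CONSOLE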
+
 [[intervals-all_of]]
 ==== `all_of`
 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
index c5238e237e580..82aa0883008a8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
@@ -404,3 +404,23 @@ setup:
             prefix: out
 
   - match: { hits.total.value: 3 }
+---
+"Test wildcard":
+  - skip:
+      version: " - 8.0.0"
+      reason: "TODO: change to 7.3 in backport"
+  - do:
+      search:
+        index: test
+        body:
+          query:
+            intervals:
+              text:
+                all_of:
+                  intervals:
+                    - match:
+                        query: cold
+                    - wildcard:
+                        pattern: out?ide
+
+  - match: { hits.total.value: 3 }
+
diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java
index 234018971ed59..d4d28057c12d0 100644
--- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java
+++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java
@@ -19,10 +19,12 @@
 
 package org.elasticsearch.index.query;
 
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.search.intervals.FilteredIntervalsSource;
 import org.apache.lucene.search.intervals.IntervalIterator;
 import org.apache.lucene.search.intervals.Intervals;
 import org.apache.lucene.search.intervals.IntervalsSource;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
@@ -80,6 +82,8 @@ public static IntervalsSourceProvider fromXContent(XContentParser parser) throws IOException {
                 return Combine.fromXContent(parser);
             case "prefix":
                 return Prefix.fromXContent(parser);
+            case "wildcard":
+                return Wildcard.fromXContent(parser);
         }
-        throw new ParsingException(parser.getTokenLocation(),
-            "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix]");
+        throw new ParsingException(parser.getTokenLocation(),
+            "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix, wildcard]");
@@ -446,18 +450,18 @@ public static class Prefix extends IntervalsSourceProvider {
 
         public static final String NAME = "prefix";
 
-        private final String term;
+        private final String prefix;
         private final String analyzer;
         private final String useField;
 
-        public Prefix(String term, String analyzer, String useField) {
-            this.term = term;
+        public Prefix(String prefix, String analyzer, String useField) {
+            this.prefix = prefix;
             this.analyzer = analyzer;
             this.useField = useField;
         }
 
         public Prefix(StreamInput in) throws IOException {
-            this.term = in.readString();
+            this.prefix = in.readString();
             this.analyzer = in.readOptionalString();
             this.useField = in.readOptionalString();
         }
@@ -472,10 +476,10 @@ public IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) {
             if (useField != null) {
                 fieldType = context.fieldMapper(useField);
                 assert fieldType != null;
-                source = Intervals.fixField(useField, fieldType.intervals(term, 0, false, analyzer, true));
+                source = Intervals.fixField(useField, fieldType.intervals(prefix, 0, false, analyzer, true));
             }
             else {
-                source = fieldType.intervals(term, 0, false, analyzer, true);
+                source = fieldType.intervals(prefix, 0, false, analyzer, true);
             }
             return source;
         }
@@ -492,14 +496,14 @@ public boolean equals(Object o) {
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Prefix prefix = (Prefix) o;
-            return Objects.equals(term, prefix.term) &&
+            return Objects.equals(this.prefix, prefix.prefix) &&
                 Objects.equals(analyzer, prefix.analyzer) &&
                 Objects.equals(useField, prefix.useField);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(term, analyzer, useField);
+            return Objects.hash(prefix, analyzer, useField);
         }
 
         @Override
@@ -509,7 +513,7 @@ public String getWriteableName() {
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeString(term);
+            out.writeString(prefix);
             out.writeOptionalString(analyzer);
             out.writeOptionalString(useField);
         }
@@ -517,7 +521,7 @@ public void writeTo(StreamOutput out) throws IOException {
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject(NAME);
-            builder.field("term", term);
+            builder.field("prefix", prefix);
             if (analyzer != null) {
                 builder.field("analyzer", analyzer);
             }
@@ -535,7 +539,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             return new Prefix(term, analyzer, useField);
         });
         static {
-            PARSER.declareString(constructorArg(), new ParseField("term"));
+            PARSER.declareString(constructorArg(), new ParseField("prefix"));
             PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer"));
             PARSER.declareString(optionalConstructorArg(), new ParseField("use_field"));
         }
@@ -545,6 +549,123 @@ public static Prefix fromXContent(XContentParser parser) throws IOException {
         }
     }
 
+    public static class Wildcard extends IntervalsSourceProvider {
+
+        public static final String NAME = "wildcard";
+
+        private final String pattern;
+        private final String analyzer;
+        private final String useField;
+
+        public Wildcard(String pattern, String analyzer, String useField) {
+            this.pattern = pattern;
+            this.analyzer = analyzer;
+            this.useField = useField;
+        }
+
+        public Wildcard(StreamInput in) throws IOException {
+            this.pattern = in.readString();
+            this.analyzer = in.readOptionalString();
+            this.useField = in.readOptionalString();
+        }
+
+        @Override
+        public IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) {
+            NamedAnalyzer analyzer = fieldType.searchAnalyzer();
+            if (this.analyzer != null) {
+                analyzer = context.getMapperService().getIndexAnalyzers().get(this.analyzer);
+            }
+            IntervalsSource source;
+            if (useField != null) {
+                fieldType = context.fieldMapper(useField);
+                assert fieldType != null;
+                checkPositions(fieldType);
+                if (this.analyzer == null) {
+                    analyzer = fieldType.searchAnalyzer();
+                }
+                BytesRef normalizedTerm = analyzer.normalize(useField, pattern);
+                // TODO Intervals.wildcard() should take BytesRef
+                source = Intervals.fixField(useField, Intervals.wildcard(normalizedTerm.utf8ToString()));
+            }
+            else {
+                checkPositions(fieldType);
+                BytesRef normalizedTerm = analyzer.normalize(fieldType.name(), pattern);
+                source = Intervals.wildcard(normalizedTerm.utf8ToString());
+            }
+            return source;
+        }
+
+        private void checkPositions(MappedFieldType type) {
+            if (type.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
+                throw new IllegalArgumentException("Cannot create intervals over field [" + type.name() + "] with no positions indexed");
+            }
+        }
+
+        @Override
+        public void extractFields(Set<String> fields) {
+            if (useField != null) {
+                fields.add(useField);
+            }
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            Wildcard wildcard = (Wildcard) o;
+            return Objects.equals(pattern, wildcard.pattern) &&
+                Objects.equals(analyzer, wildcard.analyzer) &&
+                Objects.equals(useField, wildcard.useField);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(pattern, analyzer, useField);
+        }
+
+        @Override
+        public String getWriteableName() {
+            return NAME;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(pattern);
+            out.writeOptionalString(analyzer);
+            out.writeOptionalString(useField);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject(NAME);
+            builder.field("pattern", pattern);
+            if (analyzer != null) {
+                builder.field("analyzer", analyzer);
+            }
+            if (useField != null) {
+                builder.field("use_field", useField);
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        private static final ConstructingObjectParser<Wildcard, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
+            String pattern = (String) args[0];
+            String analyzer = (String) args[1];
+            String useField = (String) args[2];
+            return new Wildcard(pattern, analyzer, useField);
+        });
+        static {
+            PARSER.declareString(constructorArg(), new ParseField("pattern"));
+            PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer"));
+            PARSER.declareString(optionalConstructorArg(), new ParseField("use_field"));
+        }
+
+        public static Wildcard fromXContent(XContentParser parser) throws IOException {
+            return PARSER.parse(parser, null);
+        }
+    }
+
     static class ScriptFilterSource extends FilteredIntervalsSource {
 
         final IntervalFilterScript script;
diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java
index 5eb82854a2097..fc3bdcfda8ecf 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -794,6 +794,8 @@ private void registerIntervalsSourceProviders() {
             IntervalsSourceProvider.Disjunction.NAME, IntervalsSourceProvider.Disjunction::new));
         namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class,
             IntervalsSourceProvider.Prefix.NAME, IntervalsSourceProvider.Prefix::new));
+        namedWriteables.add(new NamedWriteableRegistry.Entry(IntervalsSourceProvider.class,
+            IntervalsSourceProvider.Wildcard.NAME, IntervalsSourceProvider.Wildcard::new));
     }
 
     private void registerQuery(QuerySpec<?> spec) {
diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java
index 7838f77cc1697..c480b52c6dc00 100644
--- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java
@@ -393,32 +393,104 @@ public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) {
             IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json);
             builder1.toQuery(createShardContext());
         });
+        String no_positions_fixed_field_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " +
+            "\"prefix\" : { \"prefix\" : \"term\", \"use_field\" : \"" + NO_POSITIONS_FIELD + "\" } } } }";
+        expectThrows(IllegalArgumentException.class, () -> {
+            IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_fixed_field_json);
+            builder1.toQuery(createShardContext());
+        });
+
         String prefix_json = "{ \"intervals\" : { \"" + PREFIXED_FIELD + "\": { " +
-            "\"prefix\" : { \"term\" : \"term\" } } } }";
+            "\"prefix\" : { \"prefix\" : \"term\" } } } }";
         builder =
(IntervalQueryBuilder) parseQuery(prefix_json); expected = new IntervalQuery(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("term"))); assertEquals(expected, builder.toQuery(createShardContext())); String short_prefix_json = "{ \"intervals\" : { \"" + PREFIXED_FIELD + "\": { " + - "\"prefix\" : { \"term\" : \"t\" } } } }"; + "\"prefix\" : { \"prefix\" : \"t\" } } } }"; builder = (IntervalQueryBuilder) parseQuery(short_prefix_json); expected = new IntervalQuery(PREFIXED_FIELD, Intervals.or( Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.wildcard("t?")), Intervals.term("t"))); assertEquals(expected, builder.toQuery(createShardContext())); + String fix_field_prefix_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"prefix\" : { \"prefix\" : \"term\", \"use_field\" : \"" + PREFIXED_FIELD + "\" } } } }"; + builder = (IntervalQueryBuilder) parseQuery(fix_field_prefix_json); + // This looks weird, but it's fine, because the innermost fixField wins + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.fixField(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("term")))); + assertEquals(expected, builder.toQuery(createShardContext())); + + String keyword_json = "{ \"intervals\" : { \"" + PREFIXED_FIELD + "\": { " + + "\"prefix\" : { \"prefix\" : \"Term\", \"analyzer\" : \"keyword\" } } } }"; + builder = (IntervalQueryBuilder) parseQuery(keyword_json); + expected = new IntervalQuery(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("Term"))); + assertEquals(expected, builder.toQuery(createShardContext())); + + String keyword_fix_field_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"prefix\" : { \"prefix\" : \"Term\", \"analyzer\" : \"keyword\", \"use_field\" : \"" + PREFIXED_FIELD + "\" } } } }"; + builder = (IntervalQueryBuilder) parseQuery(keyword_fix_field_json); + expected = new IntervalQuery(STRING_FIELD_NAME, + Intervals.fixField(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("Term")))); + assertEquals(expected, builder.toQuery(createShardContext())); + } + + public void testWildcard() throws IOException { + + String json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"wildcard\" : { \"pattern\" : \"Te?m\" } } } }"; + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(STRING_FIELD_NAME, Intervals.wildcard("te?m")); + assertEquals(expected, builder.toQuery(createShardContext())); + + String no_positions_json = "{ \"intervals\" : { \"" + NO_POSITIONS_FIELD + "\": { " + + "\"wildcard\" : { \"pattern\" : \"term\" } } } }"; + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json); + builder1.toQuery(createShardContext()); + }); + + String keyword_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"wildcard\" : { \"pattern\" : \"Te?m\", \"analyzer\" : \"keyword\" } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(keyword_json); + expected = new IntervalQuery(STRING_FIELD_NAME, Intervals.wildcard("Te?m")); + assertEquals(expected, builder.toQuery(createShardContext())); + + String fixed_field_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"wildcard\" : { \"pattern\" : \"Te?m\", \"use_field\" : \"masked_field\" } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); + 
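// A minimal sketch (not part of this patch) of what the new "wildcard" rule exercised by this
// test boils down to, assuming a positions-enabled text field; the variable names, the field
// name "body" and the pattern "te?m" are illustrative only.
IntervalsSource wildcardSource = Intervals.wildcard("te?m");      // '?' matches exactly one character
Query wildcardQuery = new IntervalQuery("body", wildcardSource);  // positional match on the analyzed pattern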
expected = new IntervalQuery(STRING_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard("te?m"))); + assertEquals(expected, builder.toQuery(createShardContext())); + + String fixed_field_json_no_positions = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"wildcard\" : { \"pattern\" : \"Te?m\", \"use_field\" : \"" + NO_POSITIONS_FIELD + "\" } } } }"; + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(fixed_field_json_no_positions); + builder1.toQuery(createShardContext()); + }); + + String fixed_field_analyzer_json = "{ \"intervals\" : { \"" + STRING_FIELD_NAME + "\": { " + + "\"wildcard\" : { \"pattern\" : \"Te?m\", \"use_field\" : \"masked_field\", \"analyzer\" : \"keyword\" } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(fixed_field_analyzer_json); + expected = new IntervalQuery(STRING_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard("Te?m"))); + assertEquals(expected, builder.toQuery(createShardContext())); } } From d1ff9818faad79e8dd53d3e4dacac108a6c762b7 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 28 Jun 2019 08:09:50 -0500 Subject: [PATCH 059/140] [ML][Data Frame] removing format support (#43659) --- .../pivot/DateHistogramGroupSource.java | 35 +++---------------- .../pivot/DateHistogramGroupSourceTests.java | 1 - .../hlrc/DateHistogramGroupSourceTests.java | 4 --- .../pivot/DateHistogramGroupSource.java | 30 ++++++---------- .../pivot/DateHistogramGroupSourceTests.java | 19 ++++++++-- .../integration/DataFrameIntegTestCase.java | 12 +++---- .../integration/DataFrameTransformIT.java | 4 +-- .../integration/DataFramePivotRestIT.java | 8 ++--- .../dataframe/persistence/DataframeIndex.java | 19 ++-------- 9 files changed, 45 insertions(+), 87 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSource.java index d880bfd82140b..c8fb885896d9c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSource.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSource.java @@ -45,7 +45,6 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXContentObject { private static final ParseField TIME_ZONE = new ParseField("time_zone"); - private static final ParseField FORMAT = new ParseField("format"); // From DateHistogramAggregationBuilder in core, transplanted and modified to a set // so we don't need to import a dependency on the class @@ -195,8 +194,7 @@ public int hashCode() { } ZoneId zoneId = (ZoneId) args[3]; - String format = (String) args[4]; - return new DateHistogramGroupSource(field, interval, format, zoneId); + return new DateHistogramGroupSource(field, interval, zoneId); }); static { @@ -212,8 +210,6 @@ public int hashCode() { return ZoneOffset.ofHours(p.intValue()); } }, TIME_ZONE, ObjectParser.ValueType.LONG); - - PARSER.declareString(optionalConstructorArg(), FORMAT); } public static DateHistogramGroupSource fromXContent(final XContentParser parser) { @@ -221,13 +217,11 @@ public static DateHistogramGroupSource fromXContent(final XContentParser parser) } private final Interval interval; - private final String format; private final ZoneId timeZone; - DateHistogramGroupSource(String field, Interval 
interval, String format, ZoneId timeZone) { + DateHistogramGroupSource(String field, Interval interval, ZoneId timeZone) { super(field); this.interval = interval; - this.format = format; this.timeZone = timeZone; } @@ -240,10 +234,6 @@ public Interval getInterval() { return interval; } - public String getFormat() { - return format; - } - public ZoneId getTimeZone() { return timeZone; } @@ -258,9 +248,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (timeZone != null) { builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); } - if (format != null) { - builder.field(FORMAT.getPreferredName(), format); - } builder.endObject(); return builder; } @@ -279,13 +266,12 @@ public boolean equals(Object other) { return Objects.equals(this.field, that.field) && Objects.equals(this.interval, that.interval) && - Objects.equals(this.timeZone, that.timeZone) && - Objects.equals(this.format, that.format); + Objects.equals(this.timeZone, that.timeZone); } @Override public int hashCode() { - return Objects.hash(field, interval, timeZone, format); + return Objects.hash(field, interval, timeZone); } public static Builder builder() { @@ -296,7 +282,6 @@ public static class Builder { private String field; private Interval interval; - private String format; private ZoneId timeZone; /** @@ -319,16 +304,6 @@ public Builder setInterval(Interval interval) { return this; } - /** - * Set the optional String formatting for the time interval. - * @param format The format of the output for the time interval key - * @return The {@link Builder} with the format set. - */ - public Builder setFormat(String format) { - this.format = format; - return this; - } - /** * Sets the time zone to use for this aggregation * @param timeZone The zoneId for the timeZone @@ -340,7 +315,7 @@ public Builder setTimeZone(ZoneId timeZone) { } public DateHistogramGroupSource build() { - return new DateHistogramGroupSource(field, interval, format, timeZone); + return new DateHistogramGroupSource(field, interval, timeZone); } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java index 32605f5c286ad..ece1c4fb743ee 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java @@ -39,7 +39,6 @@ public static DateHistogramGroupSource randomDateHistogramGroupSource() { String field = randomAlphaOfLengthBetween(1, 20); return new DateHistogramGroupSource(field, randomDateHistogramInterval(), - randomBoolean() ? randomAlphaOfLength(10) : null, randomBoolean() ? 
randomZone() : null); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/hlrc/DateHistogramGroupSourceTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/hlrc/DateHistogramGroupSourceTests.java index dc31004607dcd..fd98e52a1527e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/hlrc/DateHistogramGroupSourceTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/hlrc/DateHistogramGroupSourceTests.java @@ -44,9 +44,6 @@ public static DateHistogramGroupSource randomDateHistogramGroupSource() { if (randomBoolean()) { dateHistogramGroupSource.setTimeZone(randomZone()); } - if (randomBoolean()) { - dateHistogramGroupSource.setFormat(randomAlphaOfLength(10)); - } return dateHistogramGroupSource; } @@ -64,7 +61,6 @@ protected org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroup protected void assertInstances(DateHistogramGroupSource serverTestInstance, org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource clientInstance) { assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField())); - assertThat(serverTestInstance.getFormat(), equalTo(clientInstance.getFormat())); assertSameInterval(serverTestInstance.getInterval(), clientInstance.getInterval()); assertThat(serverTestInstance.getTimeZone(), equalTo(clientInstance.getTimeZone())); assertThat(serverTestInstance.getType().name(), equalTo(clientInstance.getType().name())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSource.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSource.java index e38915c0beac6..3b856f454c4f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSource.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSource.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.dataframe.transforms.pivot; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -187,13 +188,11 @@ private void writeInterval(Interval interval, StreamOutput out) throws IOExcepti private static final String NAME = "data_frame_date_histogram_group"; private static final ParseField TIME_ZONE = new ParseField("time_zone"); - private static final ParseField FORMAT = new ParseField("format"); private static final ConstructingObjectParser STRICT_PARSER = createParser(false); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); private final Interval interval; - private String format; private ZoneId timeZone; public DateHistogramGroupSource(String field, Interval interval) { @@ -205,7 +204,10 @@ public DateHistogramGroupSource(StreamInput in) throws IOException { super(in); this.interval = readInterval(in); this.timeZone = in.readOptionalZoneId(); - this.format = in.readOptionalString(); + // Format was optional in 7.2.x, removed in 7.3+ + if (in.getVersion().before(Version.V_7_3_0)) { + in.readOptionalString(); + } } private static ConstructingObjectParser createParser(boolean lenient) { @@ -242,7 +244,6 @@ private static ConstructingObjectParser createPa } }, TIME_ZONE, 
ObjectParser.ValueType.LONG); - parser.declareString(DateHistogramGroupSource::setFormat, FORMAT); return parser; } @@ -259,14 +260,6 @@ public Interval getInterval() { return interval; } - public String getFormat() { - return format; - } - - public void setFormat(String format) { - this.format = format; - } - public ZoneId getTimeZone() { return timeZone; } @@ -280,7 +273,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(field); writeInterval(interval, out); out.writeOptionalZoneId(timeZone); - out.writeOptionalString(format); + // Format was optional in 7.2.x, removed in 7.3+ + if (out.getVersion().before(Version.V_7_3_0)) { + out.writeOptionalString(null); + } } @Override @@ -293,9 +289,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (timeZone != null) { builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); } - if (format != null) { - builder.field(FORMAT.getPreferredName(), format); - } builder.endObject(); return builder; } @@ -314,13 +307,12 @@ public boolean equals(Object other) { return Objects.equals(this.field, that.field) && Objects.equals(interval, that.interval) && - Objects.equals(timeZone, that.timeZone) && - Objects.equals(format, that.format); + Objects.equals(timeZone, that.timeZone); } @Override public int hashCode() { - return Objects.hash(field, interval, timeZone, format); + return Objects.hash(field, interval, timeZone); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java index 7ce0374331323..b28cf603030f5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.core.dataframe.transforms.pivot; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -29,12 +32,22 @@ public static DateHistogramGroupSource randomDateHistogramGroupSource() { if (randomBoolean()) { dateHistogramGroupSource.setTimeZone(randomZone()); } - if (randomBoolean()) { - dateHistogramGroupSource.setFormat(randomAlphaOfLength(10)); - } return dateHistogramGroupSource; } + public void testBackwardsSerialization() throws IOException { + DateHistogramGroupSource groupSource = randomDateHistogramGroupSource(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_7_2_0); + groupSource.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + in.setVersion(Version.V_7_2_0); + DateHistogramGroupSource streamedGroupSource = new DateHistogramGroupSource(in); + assertEquals(groupSource, streamedGroupSource); + } + } + } + @Override protected DateHistogramGroupSource doParseInstance(XContentParser parser) throws IOException { return DateHistogramGroupSource.fromXContent(parser, false); diff --git 
a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java index e79dce592eb7c..a1d8aca86ba16 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java @@ -143,25 +143,21 @@ protected void waitUntilCheckpoint(String id, long checkpoint, TimeValue waitTim protected DateHistogramGroupSource createDateHistogramGroupSourceWithFixedInterval(String field, DateHistogramInterval interval, - ZoneId zone, - String format) { + ZoneId zone) { DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder() .setField(field) .setInterval(new DateHistogramGroupSource.FixedInterval(interval)) - .setTimeZone(zone) - .setFormat(format); + .setTimeZone(zone); return builder.build(); } protected DateHistogramGroupSource createDateHistogramGroupSourceWithCalendarInterval(String field, DateHistogramInterval interval, - ZoneId zone, - String format) { + ZoneId zone) { DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder() .setField(field) .setInterval(new DateHistogramGroupSource.CalendarInterval(interval)) - .setTimeZone(zone) - .setFormat(format); + .setTimeZone(zone); return builder.build(); } diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java index ef6a65c86787c..456ba91ef4381 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java @@ -45,7 +45,7 @@ public void testDataFrameTransformCrud() throws Exception { createReviewsIndex(indexName, 100); Map groups = new HashMap<>(); - groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null, null)); + groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null)); groups.put("by-user", TermsGroupSource.builder().setField("user_id").build()); groups.put("by-business", TermsGroupSource.builder().setField("business_id").build()); @@ -82,7 +82,7 @@ public void testContinuousDataFrameTransformCrud() throws Exception { createReviewsIndex(indexName, 100); Map groups = new HashMap<>(); - groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null, null)); + groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null)); groups.put("by-user", TermsGroupSource.builder().setField("user_id").build()); groups.put("by-business", TermsGroupSource.builder().setField("business_id").build()); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 85457307fc84f..f2cd95ed1a9c7 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -373,7 +373,7 @@ public void testDateHistogramPivot() throws Exception { + " \"group_by\": {" + " \"by_hr\": {" + " \"date_histogram\": {" - + " \"fixed_interval\": \"1h\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd_HH\"" + + " \"fixed_interval\": \"1h\",\"field\":\"timestamp\"" + " } } }," + " \"aggregations\": {" + " \"avg_rating\": {" @@ -407,7 +407,7 @@ public void testPreviewTransform() throws Exception { config += " \"pivot\": {" + " \"group_by\": {" + " \"user.id\": {\"terms\": { \"field\": \"user_id\" }}," - + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"}}}," + + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\"}}}," + " \"aggregations\": {" + " \"user.avg_rating\": {" + " \"avg\": {" @@ -457,7 +457,7 @@ public void testPreviewTransformWithPipeline() throws Exception { + " \"pivot\": {" + " \"group_by\": {" + " \"user.id\": {\"terms\": { \"field\": \"user_id\" }}," - + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"}}}," + + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\"}}}," + " \"aggregations\": {" + " \"user.avg_rating\": {" + " \"avg\": {" @@ -497,7 +497,7 @@ public void testPivotWithMaxOnDateField() throws Exception { config +=" \"pivot\": { \n" + " \"group_by\": {\n" + " \"by_day\": {\"date_histogram\": {\n" + - " \"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"\n" + + " \"fixed_interval\": \"1d\",\"field\":\"timestamp\"\n" + " }}\n" + " },\n" + " \n" + diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index 488e9a73b32d9..005be73e8cee2 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -19,8 +19,6 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; -import org.elasticsearch.xpack.core.dataframe.transforms.pivot.DateHistogramGroupSource; -import org.elasticsearch.xpack.core.dataframe.transforms.pivot.SingleGroupSource; import java.io.IOException; import java.time.Clock; @@ -35,9 +33,7 @@ public final class DataframeIndex { private static final String PROPERTIES = "properties"; private static final String TYPE = "type"; - private static final String FORMAT = "format"; private static final String META = "_meta"; - private static final String DEFAULT_TIME_FORMAT = "strict_date_optional_time||epoch_millis"; private DataframeIndex() { } @@ -56,7 +52,7 @@ public static void createDestinationIndex(Client client, request.mapping( SINGLE_MAPPING_NAME, - createMappingXContent(mappings, 
transformConfig.getPivotConfig().getGroupConfig().getGroups(), transformConfig.getId(), clock)); + createMappingXContent(mappings, transformConfig.getId(), clock)); client.execute(CreateIndexAction.INSTANCE, request, ActionListener.wrap(createIndexResponse -> { listener.onResponse(true); @@ -69,13 +65,12 @@ public static void createDestinationIndex(Client client, } private static XContentBuilder createMappingXContent(Map mappings, - Map groupSources, String id, Clock clock) { try { XContentBuilder builder = jsonBuilder().startObject(); builder.startObject(SINGLE_MAPPING_NAME); - addProperties(builder, mappings, groupSources); + addProperties(builder, mappings); addMetaData(builder, id, clock); builder.endObject(); // _doc type return builder.endObject(); @@ -85,8 +80,7 @@ private static XContentBuilder createMappingXContent(Map mapping } private static XContentBuilder addProperties(XContentBuilder builder, - Map mappings, - Map groupSources) throws IOException { + Map mappings) throws IOException { builder.startObject(PROPERTIES); for (Entry field : mappings.entrySet()) { String fieldName = field.getKey(); @@ -95,13 +89,6 @@ private static XContentBuilder addProperties(XContentBuilder builder, builder.startObject(fieldName); builder.field(TYPE, fieldType); - SingleGroupSource groupSource = groupSources.get(fieldName); - if (groupSource instanceof DateHistogramGroupSource) { - String format = ((DateHistogramGroupSource) groupSource).getFormat(); - if (format != null) { - builder.field(FORMAT, DEFAULT_TIME_FORMAT + "||" + format); - } - } builder.endObject(); } builder.endObject(); // PROPERTIES From a46d7797bd3a1c9ff3c5fc1cbb764280245ef655 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 28 Jun 2019 16:33:24 +0300 Subject: [PATCH 060/140] Add missing dependencies so we can build in parallel (#43672) --- distribution/build.gradle | 3 ++- x-pack/plugin/sql/sql-cli/build.gradle | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 1d081a2755f85..9606604036101 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -117,7 +117,7 @@ task buildTransportModules { void copyModule(Sync copyTask, Project module) { copyTask.configure { - dependsOn { module.bundlePlugin } + dependsOn "${module.path}:bundlePlugin" from({ zipTree(module.bundlePlugin.outputs.files.singleFile) }) { includeEmptyDirs false @@ -167,6 +167,7 @@ buildDefaultLog4jConfig.doLast(writeLog4jProperties) // copy log4j2.properties from modules that have it void copyLog4jProperties(Task buildTask, Project module) { + buildTask.dependsOn "${module.path}:bundlePlugin" buildTask.doFirst { FileTree tree = zipTree(module.bundlePlugin.outputs.files.singleFile) FileTree filtered = tree.matching { diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 927d165c2d268..bbd87c055d708 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -51,9 +51,9 @@ dependencyLicenses { * can be easily shipped around and used. */ jar { + dependsOn configurations.runtimeClasspath from({ - configurations.compile.collect { it.isDirectory() ? it : zipTree(it) } - configurations.runtime.collect { it.isDirectory() ? it : zipTree(it) } + configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } }) { // We don't need the META-INF from the things we bundle. For now. 
exclude 'META-INF/*' From 854215cae29aae6a9621cd9ce6cec5aeddae995e Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 28 Jun 2019 16:38:17 +0300 Subject: [PATCH 061/140] Testclusters: Convert additional projects (#43625) * Testclusters: Convert additional projects Found some more that were not using testclusters from elasticsearch-ci/1 * Allow IOException too * Make the client more resilient --- qa/ccs-unavailable-clusters/build.gradle | 1 + qa/die-with-dignity/build.gradle | 10 +++- .../elasticsearch/DieWithDignityPlugin.java | 4 ++ .../qa/die_with_dignity/DieWithDignityIT.java | 55 +++++++------------ qa/evil-tests/build.gradle | 1 + qa/multi-cluster-search/build.gradle | 39 ++++++------- qa/smoke-test-http/build.gradle | 3 +- .../build.gradle | 1 + 8 files changed, 56 insertions(+), 58 deletions(-) diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index ea80ee983b876..749623b26c329 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' diff --git a/qa/die-with-dignity/build.gradle b/qa/die-with-dignity/build.gradle index 140df6e283ab8..a40f6366e6515 100644 --- a/qa/die-with-dignity/build.gradle +++ b/qa/die-with-dignity/build.gradle @@ -17,6 +17,7 @@ * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -24,14 +25,17 @@ esplugin { classname 'org.elasticsearch.DieWithDignityPlugin' } -integTestRunner { +integTest.runner { systemProperty 'tests.security.manager', 'false' systemProperty 'tests.system_call_filter', 'false' - nonInputProperties.systemProperty 'pidfile', "${-> integTest.getNodes().get(0).pidFile}" - nonInputProperties.systemProperty 'log', "${-> integTest.getNodes().get(0).homeDir}/logs/${-> integTest.getNodes().get(0).clusterName}_server.json" + nonInputProperties.systemProperty 'log', "${-> testClusters.integTest.singleNode().getServerLog()}" systemProperty 'runtime.java.home', "${project.runtimeJavaHome}" } +testClusters.integTest { + systemProperty "die.with.dignity.test", "whatever" +} + test.enabled = false check.dependsOn integTest diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java index ed1e3d3879a5d..8027eeb8948d0 100644 --- a/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java +++ b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java @@ -36,6 +36,10 @@ public class DieWithDignityPlugin extends Plugin implements ActionPlugin { + public DieWithDignityPlugin() { + assert System.getProperty("die.with.dignity.test") != null : "test should pass the `die.with.dignity.test` property"; + } + @Override public List getRestHandlers( final Settings settings, diff --git a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index d83edfb2c997a..3c2359c59ce2e 100644 --- a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ 
b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -19,12 +19,10 @@ package org.elasticsearch.qa.die_with_dignity; -import org.apache.http.ConnectionClosedException; -import org.apache.lucene.util.Constants; import org.elasticsearch.client.Request; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; -import org.hamcrest.Matcher; import java.io.BufferedReader; import java.io.IOException; @@ -36,51 +34,28 @@ import java.util.List; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.hasToString; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; public class DieWithDignityIT extends ESRestTestCase { public void testDieWithDignity() throws Exception { - // deleting the PID file prevents stopping the cluster from failing since it occurs if and only if the PID file exists - final Path pidFile = PathUtils.get(System.getProperty("pidfile")); - final List pidFileLines = Files.readAllLines(pidFile); - assertThat(pidFileLines, hasSize(1)); - final int pid = Integer.parseInt(pidFileLines.get(0)); - Files.delete(pidFile); - IOException e = expectThrows(IOException.class, - () -> client().performRequest(new Request("GET", "/_die_with_dignity"))); - Matcher failureMatcher = instanceOf(ConnectionClosedException.class); - if (Constants.WINDOWS) { - /* - * If the other side closes the connection while we're waiting to fill our buffer - * we can get IOException with the message below. It seems to only come up on - * Windows and it *feels* like it could be a ConnectionClosedException but - * upstream does not consider this a bug: - * https://issues.apache.org/jira/browse/HTTPASYNC-134 - * - * So we catch it here and consider it "ok". 
- */ - failureMatcher = either(failureMatcher) - .or(hasToString(containsString("An existing connection was forcibly closed by the remote host"))); - } - assertThat(e, failureMatcher); + expectThrows( + IOException.class, + () -> client().performRequest(new Request("GET", "/_die_with_dignity")) + ); // the Elasticsearch process should die and disappear from the output of jps assertBusy(() -> { final String jpsPath = PathUtils.get(System.getProperty("runtime.java.home"), "bin/jps").toString(); - final Process process = new ProcessBuilder().command(jpsPath).start(); + final Process process = new ProcessBuilder().command(jpsPath, "-v").start(); assertThat(process.waitFor(), equalTo(0)); + try (InputStream is = process.getInputStream(); BufferedReader in = new BufferedReader(new InputStreamReader(is, "UTF-8"))) { String line; while ((line = in.readLine()) != null) { - final int currentPid = Integer.parseInt(line.split("\\s+")[0]); - assertThat(line, pid, not(equalTo(currentPid))); + assertThat(line, line, not(containsString("-Ddie.with.dignity.test"))); } } }); @@ -95,9 +70,9 @@ public void testDieWithDignity() throws Exception { try { while (it.hasNext() && (fatalError == false || fatalErrorInThreadExiting == false)) { final String line = it.next(); - if (line.matches(".*ERROR.*o\\.e\\.ExceptionsHelper.*node-0.*fatal error.*")) { + if (line.matches(".*ERROR.*o\\.e\\.ExceptionsHelper.*integTest-0.*fatal error.*")) { fatalError = true; - } else if (line.matches(".*ERROR.*o\\.e\\.b\\.ElasticsearchUncaughtExceptionHandler.*node-0.*" + } else if (line.matches(".*ERROR.*o\\.e\\.b\\.ElasticsearchUncaughtExceptionHandler.*integTest-0.*" + "fatal error in thread \\[Thread-\\d+\\], exiting.*")) { fatalErrorInThreadExiting = true; assertTrue(it.hasNext()); @@ -127,4 +102,14 @@ protected boolean preserveClusterUponCompletion() { return true; } + @Override + protected final Settings restClientSettings() { + return Settings.builder().put(super.restClientSettings()) + // use a short client socket timeout here so the request that kills the node + // fails fast rather than hanging: the dying node never sends back a response, + // so there is nothing to wait for + .put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "1s") + .build(); + } + } diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 2f9239e5c2f22..38c1b3e1a9aa9 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -23,6 +23,7 @@ * threads, etc. 
*/ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-test' dependencies { diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 7f923d03f7166..1913c86fc9c92 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -19,42 +19,43 @@ import org.elasticsearch.gradle.test.RestIntegTestTask +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(":client:rest-high-level") } -task remoteClusterTest(type: RestIntegTestTask) { +task 'remote-cluster'(type: RestIntegTestTask) { mustRunAfter(precommit) + runner { + systemProperty 'tests.rest.suite', 'remote_cluster' + } } -remoteClusterTestCluster { - numNodes = 2 - clusterName = 'remote-cluster' - setting 'cluster.remote.connect', false +testClusters.'remote-cluster' { + numberOfNodes = 2 + setting 'cluster.remote.connect', 'false' } -remoteClusterTestRunner { - systemProperty 'tests.rest.suite', 'remote_cluster' +task mixedClusterTest(type: RestIntegTestTask) { + useCluster testClusters.'remote-cluster' + runner { + dependsOn 'remote-cluster' + systemProperty 'tests.rest.suite', 'multi_cluster' + } } -task mixedClusterTest(type: RestIntegTestTask) {} - -mixedClusterTestCluster { - dependsOn remoteClusterTestRunner - setting 'cluster.remote.my_remote_cluster.seeds', "\"${-> remoteClusterTest.nodes.get(0).transportUri()}\"" - setting 'cluster.remote.connections_per_cluster', 1 - setting 'cluster.remote.connect', true +testClusters.mixedClusterTest { + setting 'cluster.remote.my_remote_cluster.seeds', + { "\"${testClusters.'remote-cluster'.getAllTransportPortURI().get(0)}\"" } + setting 'cluster.remote.connections_per_cluster', '1' + setting 'cluster.remote.connect', 'true' } -mixedClusterTestRunner { - systemProperty 'tests.rest.suite', 'multi_cluster' - finalizedBy 'remoteClusterTestCluster#node0.stop','remoteClusterTestCluster#node1.stop' -} task integTest { - dependsOn = [mixedClusterTest] + dependsOn mixedClusterTest } test.enabled = false // no unit tests for multi-cluster-search, only integration tests diff --git a/qa/smoke-test-http/build.gradle b/qa/smoke-test-http/build.gradle index ef1a97fc7abaf..90fc9e3950147 100644 --- a/qa/smoke-test-http/build.gradle +++ b/qa/smoke-test-http/build.gradle @@ -17,6 +17,7 @@ * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' @@ -26,7 +27,7 @@ dependencies { testCompile project(path: ':plugins:transport-nio', configuration: 'runtime') // for http } -integTestRunner { +integTest.runner { /* * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each * other if we allow them to set the number of available processors as it's set-once in Netty. diff --git a/qa/smoke-test-ingest-with-all-dependencies/build.gradle b/qa/smoke-test-ingest-with-all-dependencies/build.gradle index 9267f90cd7e0a..9f5c40ac93798 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/build.gradle +++ b/qa/smoke-test-ingest-with-all-dependencies/build.gradle @@ -17,6 +17,7 @@ * under the License. 
*/ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' From fd4eb96d1c2ee7091d99b5a5069673de45833801 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 28 Jun 2019 15:42:43 +0200 Subject: [PATCH 062/140] Refactor IndexSearcherWrapper to disallow the wrapping of IndexSearcher (#43645) This change removes the ability to wrap an IndexSearcher in plugins. The IndexSearcherWrapper is replaced by an IndexReaderWrapper that allows wrapping the DirectoryReader only. This simplifies the creation of the context IndexSearcher that is used on a per-request basis. This change also moves the optimization that was implemented in the security index searcher wrapper to the ContextIndexSearcher, which now checks the live docs to determine how the search should be executed. If the underlying live docs are backed by a sparse bit set, the searcher computes the intersection between the query and the live docs instead of checking the live docs for every document that matches the query. --- .../apache/lucene/search/XIndexSearcher.java | 46 -- .../apache/lucene/util/CombinedBitSet.java | 117 ++++ .../org/elasticsearch/index/IndexModule.java | 49 +- .../org/elasticsearch/index/IndexService.java | 16 +- .../index/shard/IndexSearcherWrapper.java | 138 ----- .../elasticsearch/index/shard/IndexShard.java | 79 ++- .../search/DefaultSearchContext.java | 3 +- .../search/internal/ContextIndexSearcher.java | 122 +++- .../lucene/util/CombinedBitSetTests.java | 111 ++++ .../elasticsearch/index/IndexModuleTests.java | 20 +- .../index/engine/InternalEngineTests.java | 31 - ...ests.java => IndexReaderWrapperTests.java} | 91 +-- .../index/shard/IndexShardIT.java | 12 +- .../index/shard/IndexShardTests.java | 43 +- .../IndexingMemoryControllerTests.java | 5 +- .../internal/ContextIndexSearcherTests.java | 425 +++++++++++++ .../profile/query/QueryProfilerTests.java | 9 +- .../index/shard/IndexShardTestCase.java | 36 +- .../aggregations/AggregatorTestCase.java | 16 +- .../test/engine/MockEngineSupport.java | 19 +- .../index/engine/FrozenEngine.java | 8 +- .../accesscontrol/DocumentSubsetReader.java | 23 +- .../SecurityIndexReaderWrapper.java | 111 ++++ .../SecurityIndexSearcherWrapper.java | 217 ------- .../SourceOnlySnapshotShardTests.java | 4 +- ...tyIndexReaderWrapperIntegrationTests.java} | 17 +- .../SecurityIndexReaderWrapperUnitTests.java | 225 +++++++ ...SecurityIndexSearcherWrapperUnitTests.java | 561 ------------------ .../xpack/security/Security.java | 6 +- 29 files changed, 1275 insertions(+), 1285 deletions(-) delete mode 100644 server/src/main/java/org/apache/lucene/search/XIndexSearcher.java create mode 100644 server/src/main/java/org/apache/lucene/util/CombinedBitSet.java delete mode 100644 server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java create mode 100644 server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java rename server/src/test/java/org/elasticsearch/index/shard/{IndexSearcherWrapperTests.java => IndexReaderWrapperTests.java} (65%) create mode 100644 server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java rename 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/{SecurityIndexSearcherWrapperIntegrationTests.java => SecurityIndexReaderWrapperIntegrationTests.java} (93%) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java diff --git a/server/src/main/java/org/apache/lucene/search/XIndexSearcher.java b/server/src/main/java/org/apache/lucene/search/XIndexSearcher.java deleted file mode 100644 index 100c5f4944afe..0000000000000 --- a/server/src/main/java/org/apache/lucene/search/XIndexSearcher.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.lucene.search; - -import org.apache.lucene.index.LeafReaderContext; - -import java.io.IOException; -import java.util.List; - -/** - * A wrapper for {@link IndexSearcher} that makes {@link IndexSearcher#search(List, Weight, Collector)} - * visible by sub-classes. - */ -public class XIndexSearcher extends IndexSearcher { - private final IndexSearcher in; - - public XIndexSearcher(IndexSearcher in) { - super(in.getIndexReader()); - this.in = in; - setSimilarity(in.getSimilarity()); - setQueryCache(in.getQueryCache()); - setQueryCachingPolicy(in.getQueryCachingPolicy()); - } - - @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { - in.search(leaves, weight, collector); - } -} diff --git a/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java new file mode 100644 index 0000000000000..cb1bd819ab24a --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.lucene.util; + +import org.apache.lucene.search.DocIdSetIterator; + +/** + * A {@link BitSet} implementation that combines two instances of {@link BitSet} and {@link Bits} + * to provide a single merged view. + */ +public final class CombinedBitSet extends BitSet implements Bits { + private final BitSet first; + private final Bits second; + private final int length; + + public CombinedBitSet(BitSet first, Bits second) { + this.first = first; + this.second = second; + this.length = first.length(); + } + + public BitSet getFirst() { + return first; + } + + /** + * This implementation is slow and requires iterating over all bits to compute + * the intersection. Use {@link #approximateCardinality()} for + * a fast approximation. + */ + @Override + public int cardinality() { + int card = 0; + for (int i = 0; i < length; i++) { + card += get(i) ? 1 : 0; + } + return card; + } + + @Override + public int approximateCardinality() { + return first.cardinality(); + } + + @Override + public int prevSetBit(int index) { + assert index >= 0 && index < length : "index=" + index + ", numBits=" + length(); + int prev = first.prevSetBit(index); + while (prev != -1 && second.get(prev) == false) { + if (prev == 0) { + return -1; + } + prev = first.prevSetBit(prev-1); + } + return prev; + } + + @Override + public int nextSetBit(int index) { + assert index >= 0 && index < length : "index=" + index + " numBits=" + length(); + int next = first.nextSetBit(index); + while (next != DocIdSetIterator.NO_MORE_DOCS && second.get(next) == false) { + if (next == length() - 1) { + return DocIdSetIterator.NO_MORE_DOCS; + } + next = first.nextSetBit(next+1); + } + return next; + } + + @Override + public long ramBytesUsed() { + return first.ramBytesUsed(); + } + + @Override + public boolean get(int index) { + return first.get(index) && second.get(index); + } + + @Override + public int length() { + return length; + } + + @Override + public void set(int i) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void clear(int i) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void clear(int startIndex, int endIndex) { + throw new UnsupportedOperationException("not implemented"); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index ca0f34803cc0c..6ef335144eb52 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -19,6 +19,10 @@ package org.elasticsearch.index; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.MMapDirectory; @@ -26,6 +30,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.client.Client; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; @@ -38,10 +43,10 @@ import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.cache.query.IndexQueryCache; import 
org.elasticsearch.index.cache.query.QueryCache; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexEventListener; -import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexingOperationListener; import org.elasticsearch.index.shard.SearchOperationListener; import org.elasticsearch.index.similarity.SimilarityService; @@ -112,7 +117,8 @@ public final class IndexModule { private final IndexSettings indexSettings; private final AnalysisRegistry analysisRegistry; private final EngineFactory engineFactory; - private SetOnce indexSearcherWrapper = new SetOnce<>(); + private SetOnce>> indexReaderWrapper = new SetOnce<>(); private final Set indexEventListeners = new HashSet<>(); private final Map> similarities = new HashMap<>(); private final Map directoryFactories; @@ -277,13 +283,26 @@ public void addSimilarity(String name, TriFunction + * The {@link CheckedFunction} is invoked each time a {@link Engine.Searcher} is requested to do an operation, + * for example search, and must return a new directory reader wrapping the provided directory reader or if no + * wrapping was performed the provided directory reader. + * The wrapped reader can filter out document just like delete documents etc. but must not change any term or + * document content. + * NOTE: The index reader wrapper ({@link CheckedFunction}) has a per-request lifecycle, + * must delegate {@link IndexReader#getReaderCacheHelper()}, {@link LeafReader#getCoreCacheHelper()} + * and must be an instance of {@link FilterDirectoryReader} that eventually exposes the original reader + * via {@link FilterDirectoryReader#getDelegate()}. + * The returned reader is closed once it goes out of scope. + *

    */ - public void setSearcherWrapper(IndexSearcherWrapperFactory indexSearcherWrapperFactory) { + public void setReaderWrapper(Function> indexReaderWrapperFactory) { ensureNotFrozen(); - this.indexSearcherWrapper.set(indexSearcherWrapperFactory); + this.indexReaderWrapper.set(indexReaderWrapperFactory); } IndexEventListener freeze() { // pkg private for testing @@ -348,16 +367,6 @@ public boolean match(String setting) { } - /** - * Factory for creating new {@link IndexSearcherWrapper} instances - */ - public interface IndexSearcherWrapperFactory { - /** - * Returns a new IndexSearcherWrapper. This method is called once per index per node - */ - IndexSearcherWrapper newWrapper(IndexService indexService); - } - public static Type defaultStoreType(final boolean allowMmap) { if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { return Type.HYBRIDFS; @@ -384,8 +393,8 @@ public IndexService newIndexService( NamedWriteableRegistry namedWriteableRegistry) throws IOException { final IndexEventListener eventListener = freeze(); - IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null - ? (shard) -> null : indexSearcherWrapper.get(); + Function> readerWrapperFactory = + indexReaderWrapper.get() == null ? (shard) -> null : indexReaderWrapper.get(); eventListener.beforeIndexCreated(indexSettings.getIndex(), indexSettings.getSettings()); final IndexStorePlugin.DirectoryFactory directoryFactory = getDirectoryFactory(indexSettings, directoryFactories); final QueryCache queryCache; @@ -402,7 +411,7 @@ public IndexService newIndexService( return new IndexService(indexSettings, indexCreationContext, environment, xContentRegistry, new SimilarityService(indexSettings, scriptService, similarities), shardStoreDeleter, analysisRegistry, engineFactory, circuitBreakerService, bigArrays, threadPool, scriptService, - client, queryCache, directoryFactory, eventListener, searcherWrapperFactory, mapperRegistry, + client, queryCache, directoryFactory, eventListener, readerWrapperFactory, mapperRegistry, indicesFieldDataCache, searchOperationListeners, indexOperationListeners, namedWriteableRegistry); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 7c3dc0fe497be..c090b2b0c9837 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -20,6 +20,7 @@ package org.elasticsearch.index; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.search.IndexSearcher; @@ -31,6 +32,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; @@ -59,7 +61,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.IndexEventListener; -import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardClosedException; import 
org.elasticsearch.index.shard.IndexingOperationListener; @@ -91,6 +92,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; @@ -105,7 +107,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final NodeEnvironment nodeEnv; private final ShardStoreDeleter shardStoreDeleter; private final IndexStorePlugin.DirectoryFactory directoryFactory; - private final IndexSearcherWrapper searcherWrapper; + private final CheckedFunction readerWrapper; private final IndexCache indexCache; private final MapperService mapperService; private final NamedXContentRegistry xContentRegistry; @@ -152,7 +154,7 @@ public IndexService( QueryCache queryCache, IndexStorePlugin.DirectoryFactory directoryFactory, IndexEventListener eventListener, - IndexModule.IndexSearcherWrapperFactory wrapperFactory, + Function> wrapperFactory, MapperRegistry mapperRegistry, IndicesFieldDataCache indicesFieldDataCache, List searchOperationListeners, @@ -204,7 +206,7 @@ public IndexService( this.directoryFactory = directoryFactory; this.engineFactory = Objects.requireNonNull(engineFactory); // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE - this.searcherWrapper = wrapperFactory.newWrapper(this); + this.readerWrapper = wrapperFactory.apply(this); this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners); this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); // kick off async ops for the first shard in this index @@ -417,7 +419,7 @@ public synchronized IndexShard createShard( similarityService, engineFactory, eventListener, - searcherWrapper, + readerWrapper, threadPool, bigArrays, engineWarmer, @@ -752,8 +754,8 @@ public final EngineFactory getEngineFactory() { return engineFactory; } - final IndexSearcherWrapper getSearcherWrapper() { - return searcherWrapper; + final CheckedFunction getReaderWrapper() { + return readerWrapper; } // pkg private for testing final IndexStorePlugin.DirectoryFactory getDirectoryFactory() { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java deleted file mode 100644 index 3a6df72a740e9..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.shard; - -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.FilterDirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.index.engine.Engine; - -import java.io.IOException; - -/** - * Extension point to add custom functionality at request time to the {@link DirectoryReader} - * and {@link IndexSearcher} managed by the {@link IndexShard}. - */ -public class IndexSearcherWrapper { - - /** - * Wraps the given {@link DirectoryReader}. The wrapped reader can filter out document just like delete documents etc. but - * must not change any term or document content. - *
<p> - * NOTE: The wrapper has a per-request lifecycle, must delegate {@link IndexReader#getReaderCacheHelper()}, - * {@link LeafReader#getCoreCacheHelper()} and must be an instance of {@link FilterDirectoryReader} that - * eventually exposes the original reader via {@link FilterDirectoryReader#getDelegate()}. - * The returned reader is closed once it goes out of scope. - * </p>
    - * @param reader The provided directory reader to be wrapped to add custom functionality - * @return a new directory reader wrapping the provided directory reader or if no wrapping was performed - * the provided directory reader - */ - protected DirectoryReader wrap(DirectoryReader reader) throws IOException { - return reader; - } - - /** - * @param searcher The provided index searcher to be wrapped to add custom functionality - * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed - * the provided index searcher - */ - protected IndexSearcher wrap(IndexSearcher searcher) throws IOException { - return searcher; - } - /** - * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher - * gets wrapped and a new {@link Engine.Searcher} instances is returned, otherwise the provided {@link Engine.Searcher} is returned. - * - * This is invoked each time a {@link Engine.Searcher} is requested to do an operation. (for example search) - */ - public final Engine.Searcher wrap(Engine.Searcher engineSearcher) throws IOException { - final ElasticsearchDirectoryReader elasticsearchDirectoryReader = - ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(engineSearcher.getDirectoryReader()); - if (elasticsearchDirectoryReader == null) { - throw new IllegalStateException("Can't wrap non elasticsearch directory reader"); - } - NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader()); - DirectoryReader reader = wrap(nonClosingReaderWrapper); - if (reader != nonClosingReaderWrapper) { - if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) { - throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey," + - " wrappers must override this method and delegate to the original readers core cache key. Wrapped readers can't be " + - "used as cache keys since their are used only per request which would lead to subtle bugs"); - } - if (ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader) != elasticsearchDirectoryReader) { - // prevent that somebody wraps with a non-filter reader - throw new IllegalStateException("wrapped directory reader hides actual ElasticsearchDirectoryReader but shouldn't"); - } - } - - final IndexSearcher origIndexSearcher = engineSearcher.searcher(); - final IndexSearcher innerIndexSearcher = new IndexSearcher(reader); - innerIndexSearcher.setQueryCache(origIndexSearcher.getQueryCache()); - innerIndexSearcher.setQueryCachingPolicy(origIndexSearcher.getQueryCachingPolicy()); - innerIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity()); - // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point - // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten - // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times - final IndexSearcher indexSearcher = wrap(innerIndexSearcher); - if (reader == nonClosingReaderWrapper && indexSearcher == innerIndexSearcher) { - return engineSearcher; - } else { - // we close the reader to make sure wrappers can release resources if needed.... 
- // our NonClosingReaderWrapper makes sure that our reader is not closed - return new Engine.Searcher(engineSearcher.source(), indexSearcher, () -> - IOUtils.close(indexSearcher.getIndexReader(), // this will close the wrappers excluding the NonClosingReaderWrapper - engineSearcher)); // this will run the closeable on the wrapped engine searcher - } - } - - private static final class NonClosingReaderWrapper extends FilterDirectoryReader { - - private NonClosingReaderWrapper(DirectoryReader in) throws IOException { - super(in, new SubReaderWrapper() { - @Override - public LeafReader wrap(LeafReader reader) { - return reader; - } - }); - } - - @Override - protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { - return new NonClosingReaderWrapper(in); - } - - @Override - protected void doClose() throws IOException { - // don't close here - mimic the MultiReader#doClose = false behavior that FilterDirectoryReader doesn't have - } - - @Override - public CacheHelper getReaderCacheHelper() { - return in.getReaderCacheHelper(); - } - - } - -} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 7b4e06a451c7d..d0733017845cd 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -23,9 +23,13 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.CheckIndex; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; @@ -50,6 +54,7 @@ import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; @@ -243,7 +248,7 @@ Runnable getGlobalCheckpointSyncer() { private static final EnumSet writeAllowedStates = EnumSet.of(IndexShardState.RECOVERING, IndexShardState.POST_RECOVERY, IndexShardState.STARTED); - private final IndexSearcherWrapper searcherWrapper; + private final CheckedFunction readerWrapper; /** * True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link @@ -269,7 +274,7 @@ public IndexShard( final SimilarityService similarityService, final @Nullable EngineFactory engineFactory, final IndexEventListener indexEventListener, - final IndexSearcherWrapper indexSearcherWrapper, + final CheckedFunction indexReaderWrapper, final ThreadPool threadPool, final BigArrays bigArrays, final Engine.Warmer warmer, @@ -349,7 +354,7 @@ public boolean shouldCache(Query query) { cachingPolicy = new UsageTrackingQueryCachingPolicy(); } indexShardOperationPermits = new IndexShardOperationPermits(shardId, threadPool); - searcherWrapper = indexSearcherWrapper; + readerWrapper = indexReaderWrapper; refreshListeners = buildRefreshListeners(); 
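For illustration only (not part of this patch): a minimal no-op wrapper under the new extension point might look like the sketch below. The class name PassThroughReaderWrapper is illustrative; the sketch exists to show the two constraints that IndexShard#wrapSearcher enforces further down, namely that the wrapper is a FilterDirectoryReader and that it delegates getReaderCacheHelper() to the original reader.

// A minimal sketch, assuming Lucene's FilterDirectoryReader API; a real wrapper
// could hide fields or documents in SubReaderWrapper#wrap.
import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;

final class PassThroughReaderWrapper extends FilterDirectoryReader {

    PassThroughReaderWrapper(DirectoryReader in) throws IOException {
        super(in, new SubReaderWrapper() {
            @Override
            public LeafReader wrap(LeafReader reader) {
                return reader; // no-op; filtering logic would go here
            }
        });
    }

    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
        return new PassThroughReaderWrapper(in);
    }

    @Override
    public CacheHelper getReaderCacheHelper() {
        return in.getReaderCacheHelper(); // required: keep the original reader's cache key
    }
}

// Registration through the new extension point, e.g. from a plugin's
// onIndexModule(IndexModule) hook:
// indexModule.setReaderWrapper(indexService -> PassThroughReaderWrapper::new);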
lastSearcherAccess.set(threadPool.relativeTimeInMillis()); persistMetadata(path, indexSettings, shardRouting, null, logger); @@ -1230,7 +1235,7 @@ private Engine.Searcher acquireSearcher(String source, Engine.SearcherScope scop != null : "DirectoryReader must be an instance of ElasticsearchDirectoryReader"; boolean success = false; try { - final Engine.Searcher wrappedSearcher = searcherWrapper == null ? searcher : searcherWrapper.wrap(searcher); + final Engine.Searcher wrappedSearcher = readerWrapper == null ? searcher : wrapSearcher(searcher, readerWrapper); assert wrappedSearcher != null; success = true; return wrappedSearcher; @@ -1243,6 +1248,72 @@ private Engine.Searcher acquireSearcher(String source, Engine.SearcherScope scop } } + static Engine.Searcher wrapSearcher(Engine.Searcher engineSearcher, + CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper) throws IOException { + assert readerWrapper != null; + final ElasticsearchDirectoryReader elasticsearchDirectoryReader = + ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(engineSearcher.getDirectoryReader()); + if (elasticsearchDirectoryReader == null) { + throw new IllegalStateException("Can't wrap non elasticsearch directory reader"); + } + NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader()); + DirectoryReader reader = readerWrapper.apply(nonClosingReaderWrapper); + if (reader != nonClosingReaderWrapper) { + if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) { + throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey," + + " wrappers must override this method and delegate to the original reader's core cache key. Wrapped readers can't be " + + "used as cache keys since they are used only per request which would lead to subtle bugs"); + } + if (ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader) != elasticsearchDirectoryReader) { + // prevent somebody from wrapping with a non-filter reader + throw new IllegalStateException("wrapped directory reader hides actual ElasticsearchDirectoryReader but shouldn't"); + } + } + + if (reader == nonClosingReaderWrapper) { + return engineSearcher; + } else { + final IndexSearcher origIndexSearcher = engineSearcher.searcher(); + final IndexSearcher newIndexSearcher = new IndexSearcher(reader); + newIndexSearcher.setQueryCache(origIndexSearcher.getQueryCache()); + newIndexSearcher.setQueryCachingPolicy(origIndexSearcher.getQueryCachingPolicy()); + newIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity()); + // we close the reader to make sure wrappers can release resources if needed....
+ // our NonClosingReaderWrapper makes sure that our reader is not closed + return new Engine.Searcher(engineSearcher.source(), newIndexSearcher, () -> + IOUtils.close(newIndexSearcher.getIndexReader(), // this will close the wrappers excluding the NonClosingReaderWrapper + engineSearcher)); // this will run the closeable on the wrapped engine searcher + } + } + + private static final class NonClosingReaderWrapper extends FilterDirectoryReader { + + private NonClosingReaderWrapper(DirectoryReader in) throws IOException { + super(in, new SubReaderWrapper() { + @Override + public LeafReader wrap(LeafReader reader) { + return reader; + } + }); + } + + @Override + protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { + return new NonClosingReaderWrapper(in); + } + + @Override + protected void doClose() throws IOException { + // don't close here - mimic the MultiReader#doClose = false behavior that FilterDirectoryReader doesn't have + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } + + } + public void close(String reason, boolean flushEngine) throws IOException { synchronized (mutex) { try { diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 1c6c2cbfa0c33..c2c089ffe31ab 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -173,7 +173,8 @@ final class DefaultSearchContext extends SearchContext { this.indexShard = indexShard; this.indexService = indexService; this.clusterService = clusterService; - this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy()); + this.searcher = new ContextIndexSearcher(engineSearcher.reader(), engineSearcher.searcher().getSimilarity(), + indexService.cache().query(), indexShard.getQueryCachingPolicy()); this.relativeTimeSupplier = relativeTimeSupplier; this.timeout = timeout; this.minNodeVersion = minNodeVersion; diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 49c310ba706b6..794c72ec14cfb 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -20,14 +20,19 @@ package org.elasticsearch.search.internal; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; @@ -35,9 +40,13 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.Weight; 
-import org.apache.lucene.search.XIndexSearcher; +import org.apache.lucene.search.similarities.Similarity; +import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CombinedBitSet; +import org.apache.lucene.util.SparseFixedBitSet; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.profile.Timer; import org.elasticsearch.search.profile.query.ProfileWeight; @@ -46,6 +55,7 @@ import org.elasticsearch.search.profile.query.QueryTimingType; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Set; @@ -53,26 +63,19 @@ * Context-aware extension of {@link IndexSearcher}. */ public class ContextIndexSearcher extends IndexSearcher implements Releasable { - - /** The wrapped {@link IndexSearcher}. The reason why we sometimes prefer delegating to this searcher instead of {@code super} is that - * this instance may have more assertions, for example if it comes from MockInternalEngine which wraps the IndexSearcher into an - * AssertingIndexSearcher. */ - private final XIndexSearcher in; + /** + * The interval at which we check for search cancellation when we cannot use + * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}. + */ + private static int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; private AggregatedDfs aggregatedDfs; - - private final Engine.Searcher engineSearcher; - - // TODO revisit moving the profiler to inheritance or wrapping model in the future private QueryProfiler profiler; - private Runnable checkCancelled; - public ContextIndexSearcher(Engine.Searcher searcher, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) { - super(searcher.reader()); - engineSearcher = searcher; - in = new XIndexSearcher(searcher.searcher()); - setSimilarity(searcher.searcher().getSimilarity()); + public ContextIndexSearcher(IndexReader reader, Similarity similarity, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) { + super(reader); + setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); } @@ -104,7 +107,7 @@ public Query rewrite(Query original) throws IOException { } try { - return in.rewrite(original); + return super.rewrite(original); } finally { if (profiler != null) { profiler.stopAndAddRewriteTime(); @@ -130,7 +133,6 @@ public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws } return new ProfileWeight(query, weight, profile); } else { - // needs to be 'super', not 'in' in order to use aggregated DFS return super.createWeight(query, scoreMode, boost); } } @@ -158,7 +160,6 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { - // in case the wrapped searcher (in) uses the scorer directly return weight.scorer(context); } @@ -175,16 +176,75 @@ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { } else { cancellableWeight = weight; } - in.search(leaves, cancellableWeight, collector); + searchInternal(leaves, cancellableWeight, collector); } - @Override - public Explanation explain(Query query, int doc) throws IOException { - if (aggregatedDfs != null) { - // dfs data is needed to explain the score - return super.explain(createWeight(rewrite(query), ScoreMode.COMPLETE, 1f), doc); + private void searchInternal(List leaves, Weight 
weight, Collector collector) throws IOException { + for (LeafReaderContext ctx : leaves) { // search each subreader + final LeafCollector leafCollector; + try { + leafCollector = collector.getLeafCollector(ctx); + } catch (CollectionTerminatedException e) { + // there is no doc of interest in this reader context + // continue with the following leaf + continue; + } + Bits liveDocs = ctx.reader().getLiveDocs(); + BitSet liveDocsBitSet = getSparseBitSetOrNull(ctx.reader().getLiveDocs()); + if (liveDocsBitSet == null) { + BulkScorer bulkScorer = weight.bulkScorer(ctx); + if (bulkScorer != null) { + try { + bulkScorer.score(leafCollector, liveDocs); + } catch (CollectionTerminatedException e) { + // collection was terminated prematurely + // continue with the following leaf + } + } + } else { + // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing: + Scorer scorer = weight.scorer(ctx); + if (scorer != null) { + try { + intersectScorerAndBitSet(scorer, liveDocsBitSet, leafCollector, + checkCancelled == null ? () -> {} : checkCancelled); + } catch (CollectionTerminatedException e) { + // collection was terminated prematurely + // continue with the following leaf + } + } + } + } + } + + private static BitSet getSparseBitSetOrNull(Bits liveDocs) { + if (liveDocs instanceof SparseFixedBitSet) { + return (BitSet) liveDocs; + } else if (liveDocs instanceof CombinedBitSet + // if the underlying role bitset is sparse + && ((CombinedBitSet) liveDocs).getFirst() instanceof SparseFixedBitSet) { + return (BitSet) liveDocs; + } else { + return null; + } + + } + + static void intersectScorerAndBitSet(Scorer scorer, BitSet acceptDocs, + LeafCollector collector, Runnable checkCancelled) throws IOException { + // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should + // be used first: + DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(new BitSetIterator(acceptDocs, + acceptDocs.approximateCardinality()), scorer.iterator())); + int seen = 0; + checkCancelled.run(); + for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled.run(); + } + collector.collect(docId); } - return in.explain(query, doc); + checkCancelled.run(); } @Override @@ -216,10 +276,8 @@ public CollectionStatistics collectionStatistics(String field) throws IOExceptio } public DirectoryReader getDirectoryReader() { - return engineSearcher.getDirectoryReader(); - } - - public Engine.Searcher getEngineSearcher() { - return engineSearcher; + final IndexReader reader = getIndexReader(); + assert reader instanceof DirectoryReader : "expected an instance of DirectoryReader, got " + reader.getClass(); + return (DirectoryReader) reader; } } diff --git a/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java b/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java new file mode 100644 index 0000000000000..65165272f5469 --- /dev/null +++ b/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.util; + +import org.apache.lucene.search.DocIdSetIterator; +import org.elasticsearch.test.ESTestCase; + +public class CombinedBitSetTests extends ESTestCase { + public void testEmpty() { + for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) { + testCase(randomIntBetween(1, 10000), 0f, percent); + testCase(randomIntBetween(1, 10000), percent, 0f); + } + } + + public void testSparse() { + for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) { + testCase(randomIntBetween(1, 10000), 0.1f, percent); + testCase(randomIntBetween(1, 10000), percent, 0.1f); + } + } + + public void testDense() { + for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) { + testCase(randomIntBetween(1, 10000), 0.9f, percent); + testCase(randomIntBetween(1, 10000), percent, 0.9f); + } + } + + public void testRandom() { + int iterations = atLeast(10); + for (int i = 0; i < iterations; i++) { + testCase(randomIntBetween(1, 10000), randomFloat(), randomFloat()); + } + } + + private void testCase(int numBits, float percent1, float percent2) { + BitSet first = randomSet(numBits, percent1); + BitSet second = randomSet(numBits, percent2); + CombinedBitSet actual = new CombinedBitSet(first, second); + FixedBitSet expected = new FixedBitSet(numBits); + or(expected, first); + and(expected, second); + assertEquals(expected.cardinality(), actual.cardinality()); + assertEquals(expected, actual, numBits); + for (int i = 0; i < numBits; ++i) { + assertEquals(expected.nextSetBit(i), actual.nextSetBit(i)); + assertEquals(Integer.toString(i), expected.prevSetBit(i), actual.prevSetBit(i)); + } + } + + private void or(BitSet set1, BitSet set2) { + int next = 0; + while (next < set2.length() && (next = set2.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) { + set1.set(next); + next += 1; + } + } + + private void and(BitSet set1, BitSet set2) { + int next = 0; + while (next < set1.length() && (next = set1.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) { + if (set2.get(next) == false) { + set1.clear(next); + } + next += 1; + } + } + + private void assertEquals(BitSet set1, BitSet set2, int maxDoc) { + for (int i = 0; i < maxDoc; ++i) { + assertEquals("Different at " + i, set1.get(i), set2.get(i)); + } + } + + private BitSet randomSet(int numBits, float percentSet) { + return randomSet(numBits, (int) (percentSet * numBits)); + } + + private BitSet randomSet(int numBits, int numBitsSet) { + assert numBitsSet <= numBits; + final BitSet set = randomBoolean() ? 
new SparseFixedBitSet(numBits) : new FixedBitSet(numBits); + for (int i = 0; i < numBitsSet; ++i) { + while (true) { + final int o = random().nextInt(numBits); + if (set.get(o) == false) { + set.set(o); + break; + } + } + } + return set; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index d0f811007a6fa..d052fa365beb2 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.Weight; @@ -34,6 +33,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -50,14 +50,12 @@ import org.elasticsearch.index.cache.query.IndexQueryCache; import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.InternalEngineTests; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.shard.IndexEventListener; -import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexingOperationListener; import org.elasticsearch.index.shard.SearchOperationListener; import org.elasticsearch.index.shard.ShardId; @@ -159,10 +157,10 @@ private IndexService newIndexService(IndexModule module) throws IOException { public void testWrapperIsBound() throws IOException { final MockEngineFactory engineFactory = new MockEngineFactory(AssertingDirectoryReader.class); IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, engineFactory, Collections.emptyMap()); - module.setSearcherWrapper((s) -> new Wrapper()); + module.setReaderWrapper(s -> new Wrapper()); IndexService indexService = newIndexService(module); - assertTrue(indexService.getSearcherWrapper() instanceof Wrapper); + assertTrue(indexService.getReaderWrapper() instanceof Wrapper); assertSame(indexService.getEngineFactory(), module.getEngineFactory()); indexService.close("simon says", false); } @@ -321,7 +319,7 @@ public void testFrozen() { assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexEventListener(null)).getMessage()); assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexOperationListener(null)).getMessage()); assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSimilarity(null, null)).getMessage()); - assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setSearcherWrapper(null)).getMessage()); + assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setReaderWrapper(null)).getMessage()); assertEquals(msg, 
expectThrows(IllegalStateException.class, () -> module.forceQueryCacheProvider(null)).getMessage()); } @@ -455,15 +453,9 @@ public Directory newDirectory(IndexSettings indexSettings, ShardPath shardPath) } } - public static final class Wrapper extends IndexSearcherWrapper { - - @Override - public DirectoryReader wrap(DirectoryReader reader) { - return null; - } - + public static final class Wrapper implements CheckedFunction { @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { + public DirectoryReader apply(DirectoryReader reader) { return null; } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 38ade1ec746cd..09872b418cc22 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -128,7 +128,6 @@ import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.shard.ShardUtils; @@ -723,36 +722,6 @@ public long getProcessedCheckpoint() { } } - public void testIndexSearcherWrapper() throws Exception { - final AtomicInteger counter = new AtomicInteger(); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - - @Override - public DirectoryReader wrap(DirectoryReader reader) { - counter.incrementAndGet(); - return reader; - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - counter.incrementAndGet(); - return searcher; - } - }; - Store store = createStore(); - Path translog = createTempDir("translog-test"); - InternalEngine engine = createEngine(store, translog); - engine.close(); - - engine = new InternalEngine(engine.config()); - assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); - Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test")); - assertThat(counter.get(), equalTo(2)); - searcher.close(); - IOUtils.close(store, engine); - } - public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { assertFalse(engine.isRecovering()); ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java similarity index 65% rename from server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java rename to server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java index 7a422e82c2202..6abcacc1581c7 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java @@ -28,32 +28,24 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TotalHitCountCollector; -import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.EngineException; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Collections; -import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import static org.hamcrest.Matchers.equalTo; - -public class IndexSearcherWrapperTests extends ESTestCase { +public class IndexReaderWrapperTests extends ESTestCase { public void testReaderCloseListenerIsCalled() throws IOException { Directory dir = newDirectory(); @@ -67,23 +59,13 @@ public void testReaderCloseListenerIsCalled() throws IOException { IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); final AtomicInteger closeCalls = new AtomicInteger(0); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new FieldMaskingReader("field", reader, closeCalls); - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return searcher; - } - - }; + CheckedFunction wrapper = + reader -> new FieldMaskingReader("field", reader, closeCalls); final int sourceRefCount = open.getRefCount(); final AtomicInteger count = new AtomicInteger(); final AtomicInteger outerCount = new AtomicInteger(); final AtomicBoolean closeCalled = new AtomicBoolean(false); - final Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true))); + final Engine.Searcher wrap = IndexShard.wrapSearcher(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)), wrapper); assertEquals(1, wrap.reader().getRefCount()); ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { if (key == open.getReaderCacheHelper().getKey()) { @@ -118,20 +100,11 @@ public void testIsCacheable() throws IOException { assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger closeCalls = new AtomicInteger(0); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new FieldMaskingReader("field", reader, closeCalls); - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return searcher; - } - }; + CheckedFunction wrapper = + reader -> new FieldMaskingReader("field", reader, closeCalls); final ConcurrentHashMap cache = new ConcurrentHashMap<>(); AtomicBoolean closeCalled = new AtomicBoolean(false); - try (Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)))) { + try (Engine.Searcher wrap = IndexShard.wrapSearcher(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)), wrapper)) { ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { 
cache.remove(key); }); @@ -159,58 +132,14 @@ public void testNoWrap() throws IOException { IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); searcher.setSimilarity(iwc.getSimilarity()); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper(); + CheckedFunction wrapper = directoryReader -> directoryReader; try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, open::close)) { - final Engine.Searcher wrap = wrapper.wrap(engineSearcher); + final Engine.Searcher wrap = IndexShard.wrapSearcher(engineSearcher, wrapper); assertSame(wrap, engineSearcher); } IOUtils.close(writer, dir); } - public void testWrapVisibility() throws IOException { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(); - IndexWriter writer = new IndexWriter(dir, iwc); - Document doc = new Document(); - doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); - IndexSearcher searcher = new IndexSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - return reader; - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return new IndexSearcher(searcher.getIndexReader()) { - @Override - protected void search(List leaves, Weight weight, Collector collector) throws IOException { - throw new IllegalStateException("boum"); - } - }; - } - - }; - final AtomicBoolean closeCalled = new AtomicBoolean(false); - final Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true))); - assertEquals(1, wrap.reader().getRefCount()); - ContextIndexSearcher contextSearcher = new ContextIndexSearcher(wrap, wrap.searcher().getQueryCache(), - wrap.searcher().getQueryCachingPolicy()); - IllegalStateException exc = expectThrows(IllegalStateException.class, - () -> contextSearcher.search(new TermQuery(new Term("field", "doc")), new TotalHitCountCollector())); - assertThat(exc.getMessage(), equalTo("boum")); - wrap.close(); - assertFalse("wrapped reader is closed", wrap.reader().tryIncRef()); - assertTrue(closeCalled.get()); - - IOUtils.close(open, writer, dir); - assertEquals(0, open.getRefCount()); - } - private static class FieldMaskingReader extends FilterDirectoryReader { private final String field; private final AtomicInteger closeCalls; diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index b07e33d0305bd..c55a1dd61be1c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.shard; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.store.LockObtainFailedException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -40,6 +41,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import 
org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -528,7 +530,7 @@ public void testShardHasMemoryBufferOnTranslogRecover() throws Throwable { client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE).get(); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {}; + CheckedFunction wrapper = directoryReader -> directoryReader; shard.close("simon says", false); AtomicReference shardRef = new AtomicReference<>(); List failures = new ArrayList<>(); @@ -646,10 +648,10 @@ public static final IndexShard recoverShard(IndexShard newShard) throws IOExcept } public static final IndexShard newIndexShard( - final IndexService indexService, - final IndexShard shard,IndexSearcherWrapper wrapper, - final CircuitBreakerService cbs, - final IndexingOperationListener... listeners) throws IOException { + final IndexService indexService, + final IndexShard shard, CheckedFunction wrapper, + final CircuitBreakerService cbs, + final IndexingOperationListener... listeners) throws IOException { ShardRouting initializingShardRouting = getInitializingShardRouting(shard.routingEntry()); return new IndexShard( initializingShardRouting, diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index f07b8c977c7fb..637eca2599709 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.AlreadyClosedException; @@ -54,6 +53,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -82,7 +82,6 @@ import org.elasticsearch.index.engine.DocIdSeqNoAndSource; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.DeleteResult; -import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; @@ -2361,7 +2360,7 @@ public void restoreShard(Store store, SnapshotId snapshotId, closeShards(target); } - public void testSearcherWrapperIsUsed() throws IOException { + public void testReaderWrapperIsUsed() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}"); indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}"); @@ -2379,17 +2378,7 @@ public void testSearcherWrapperIsUsed() throws IOException { search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10); assertEquals(search.totalHits.value, 1); } - IndexSearcherWrapper wrapper = new 
IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new FieldMaskingReader("foo", reader); - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return searcher; - } - }; + CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); IndexShard newShard = newShard( ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.ExistingStoreRecoverySource.INSTANCE), @@ -2421,18 +2410,8 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { closeShards(newShard); } - public void testSearcherWrapperWorksWithGlobalOrdinals() throws IOException { - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new FieldMaskingReader("foo", reader); - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return searcher; - } - }; + public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { + CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) @@ -2533,16 +2512,8 @@ public void testSearchIsReleaseIfWrapperFails() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}"); shard.refresh("test"); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - public DirectoryReader wrap(DirectoryReader reader) throws IOException { - throw new RuntimeException("boom"); - } - - @Override - public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - return searcher; - } + CheckedFunction wrapper = reader -> { + throw new RuntimeException("boom"); }; closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index 6b4ddcf1ae64b..a48196e0ff790 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -18,17 +18,18 @@ */ package org.elasticsearch.indices; +import org.apache.lucene.index.DirectoryReader; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardIT; import org.elasticsearch.index.shard.IndexShardTestCase; @@ -426,7 +427,7 @@ public void testTranslogRecoveryWorksWithIMC() throws IOException { client().prepareIndex("test", "test", Integer.toString(i)).setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); } - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {}; + CheckedFunction wrapper = 
directoryReader -> directoryReader; shard.close("simon says", false); AtomicReference shardRef = new AtomicReference<>(); Settings settings = Settings.builder().put("indices.memory.index_buffer_size", "50kb").build(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java new file mode 100644 index 0000000000000..ed2f972562c9c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -0,0 +1,425 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.internal; + +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CombinedBitSet; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.SparseFixedBitSet; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; + 
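The identity wrapper the converted tests use (directoryReader -> directoryReader) exercises the short-circuit in wrapSearcher: when the function returns its input, the original Engine.Searcher comes back untouched and no extra IndexSearcher is allocated. A minimal sketch of that contract, assuming an Engine.Searcher built over an ElasticsearchDirectoryReader as in the tests above; wrapSearcher is package-private, so a caller like this would live in org.elasticsearch.index.shard, and the class name is illustrative.

package org.elasticsearch.index.shard; // wrapSearcher is package-private

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.index.engine.Engine;

final class IdentityWrapperSketch {

    // With an identity function, wrapSearcher detects that nothing was wrapped
    // and hands back the very same Engine.Searcher instance.
    static Engine.Searcher wrapWithIdentity(Engine.Searcher engineSearcher) throws IOException {
        CheckedFunction<DirectoryReader, DirectoryReader, IOException> identity = reader -> reader;
        Engine.Searcher wrapped = IndexShard.wrapSearcher(engineSearcher, identity);
        assert wrapped == engineSearcher; // mirrors testNoWrap above
        return wrapped;
    }
}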
+import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.IdentityHashMap; +import java.util.Set; + +import static org.elasticsearch.search.internal.ContextIndexSearcher.intersectScorerAndBitSet; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class ContextIndexSearcherTests extends ESTestCase { + public void testIntersectScorerAndRoleBits() throws Exception { + final Directory directory = newDirectory(); + IndexWriter iw = new IndexWriter( + directory, + new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE) + ); + + Document document = new Document(); + document.add(new StringField("field1", "value1", Field.Store.NO)); + document.add(new StringField("field2", "value1", Field.Store.NO)); + iw.addDocument(document); + + document = new Document(); + document.add(new StringField("field1", "value2", Field.Store.NO)); + document.add(new StringField("field2", "value1", Field.Store.NO)); + iw.addDocument(document); + + document = new Document(); + document.add(new StringField("field1", "value3", Field.Store.NO)); + document.add(new StringField("field2", "value1", Field.Store.NO)); + iw.addDocument(document); + + document = new Document(); + document.add(new StringField("field1", "value4", Field.Store.NO)); + document.add(new StringField("field2", "value1", Field.Store.NO)); + iw.addDocument(document); + + iw.commit(); + iw.deleteDocuments(new Term("field1", "value3")); + iw.close(); + DirectoryReader directoryReader = DirectoryReader.open(directory); + IndexSearcher searcher = new IndexSearcher(directoryReader); + Weight weight = searcher.createWeight(new TermQuery(new Term("field2", "value1")), + org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f); + + LeafReaderContext leaf = directoryReader.leaves().get(0); + + CombinedBitSet bitSet = new CombinedBitSet(query(leaf, "field1", "value1"), leaf.reader().getLiveDocs()); + LeafCollector leafCollector = new LeafBucketCollector() { + @Override + public void collect(int doc, long bucket) throws IOException { + assertThat(doc, equalTo(0)); + } + }; + intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {}); + + bitSet = new CombinedBitSet(query(leaf, "field1", "value2"), leaf.reader().getLiveDocs()); + leafCollector = new LeafBucketCollector() { + @Override + public void collect(int doc, long bucket) throws IOException { + assertThat(doc, equalTo(1)); + } + }; + intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {}); + + + bitSet = new CombinedBitSet(query(leaf, "field1", "value3"), leaf.reader().getLiveDocs()); + leafCollector = new LeafBucketCollector() { + @Override + public void collect(int doc, long bucket) throws IOException { + fail("docId [" + doc + "] should have been deleted"); + } + }; + intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {}); + + bitSet = new CombinedBitSet(query(leaf, "field1", "value4"), leaf.reader().getLiveDocs()); + leafCollector = new LeafBucketCollector() { + @Override + public void collect(int doc, long bucket) throws IOException { + assertThat(doc, equalTo(3)); + } + }; + intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {}); + + directoryReader.close(); + directory.close(); + } + + public void testContextIndexSearcherSparseNoDeletions() throws IOException { + doTestContextIndexSearcher(true, false); + } + + public void testContextIndexSearcherDenseNoDeletions() throws 
IOException { + doTestContextIndexSearcher(false, false); + } + + public void testContextIndexSearcherSparseWithDeletions() throws IOException { + doTestContextIndexSearcher(true, true); + } + + public void testContextIndexSearcherDenseWithDeletions() throws IOException { + doTestContextIndexSearcher(false, true); + } + + public void doTestContextIndexSearcher(boolean sparse, boolean deletions) throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null)); + Document doc = new Document(); + StringField allowedField = new StringField("allowed", "yes", Field.Store.NO); + doc.add(allowedField); + StringField fooField = new StringField("foo", "bar", Field.Store.NO); + doc.add(fooField); + StringField deleteField = new StringField("delete", "no", Field.Store.NO); + doc.add(deleteField); + w.addDocument(doc); + if (deletions) { + // add a document that matches foo:bar but will be deleted + deleteField.setStringValue("yes"); + w.addDocument(doc); + deleteField.setStringValue("no"); + } + allowedField.setStringValue("no"); + w.addDocument(doc); + if (sparse) { + for (int i = 0; i < 1000; ++i) { + w.addDocument(doc); + } + w.forceMerge(1); + } + w.deleteDocuments(new Term("delete", "yes")); + + IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY); + BitsetFilterCache.Listener listener = new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) { + + } + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) { + + } + }; + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(w), + new ShardId(settings.getIndex(), 0)); + BitsetFilterCache cache = new BitsetFilterCache(settings, listener); + Query roleQuery = new TermQuery(new Term("allowed", "yes")); + BitSet bitSet = cache.getBitSetProducer(roleQuery).getBitSet(reader.leaves().get(0)); + if (sparse) { + assertThat(bitSet, instanceOf(SparseFixedBitSet.class)); + } else { + assertThat(bitSet, instanceOf(FixedBitSet.class)); + } + + DocumentSubsetDirectoryReader filteredReader = new DocumentSubsetDirectoryReader(reader, cache, roleQuery); + + ContextIndexSearcher searcher = new ContextIndexSearcher(filteredReader, IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()); + searcher.setCheckCancelled(() -> {}); + + // Searching a non-existing term will trigger a null scorer + assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value")))); + + assertEquals(1, searcher.count(new TermQuery(new Term("foo", "bar")))); + + // make sure scorers are created only once, see #1725 + assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); + IOUtils.close(reader, w, dir); + } + + private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException { + SparseFixedBitSet sparseFixedBitSet = new SparseFixedBitSet(leaf.reader().maxDoc()); + TermsEnum tenum = leaf.reader().terms(field).iterator(); + while (tenum.next().utf8ToString().equals(value) == false) { + } + PostingsEnum penum = tenum.postings(null); + sparseFixedBitSet.or(penum); + return sparseFixedBitSet; + } + + private static class DocumentSubsetDirectoryReader extends FilterDirectoryReader { + private final BitsetFilterCache bitsetFilterCache; + private final Query roleQuery; + + DocumentSubsetDirectoryReader(DirectoryReader in, BitsetFilterCache 
bitsetFilterCache, Query roleQuery) throws IOException { + super(in, new SubReaderWrapper() { + @Override + public LeafReader wrap(LeafReader reader) { + try { + return new DocumentSubsetReader(reader, bitsetFilterCache, roleQuery); + } catch (Exception e) { + throw ExceptionsHelper.convertToElastic(e); + } + } + }); + this.bitsetFilterCache = bitsetFilterCache; + this.roleQuery = roleQuery; + } + + @Override + protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { + return new DocumentSubsetDirectoryReader(in, bitsetFilterCache, roleQuery); + } + + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } + } + + private static class DocumentSubsetReader extends FilterLeafReader { + private final BitSet roleQueryBits; + private final int numDocs; + + /** + *
<p>Construct a FilterLeafReader based on the specified base reader. + * <p>Note that base reader is closed if this FilterLeafReader is closed.</p>
    + * + * @param in specified base reader. + */ + DocumentSubsetReader(LeafReader in, BitsetFilterCache bitsetFilterCache, Query roleQuery) throws IOException { + super(in); + this.roleQueryBits = bitsetFilterCache.getBitSetProducer(roleQuery).getBitSet(in.getContext()); + this.numDocs = computeNumDocs(in, roleQueryBits); + } + + @Override + public CacheHelper getCoreCacheHelper() { + return in.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + // Not delegated since we change the live docs + return null; + } + + @Override + public int numDocs() { + return numDocs; + } + + @Override + public Bits getLiveDocs() { + final Bits actualLiveDocs = in.getLiveDocs(); + if (roleQueryBits == null) { + return new Bits.MatchNoBits(in.maxDoc()); + } else if (actualLiveDocs == null) { + return roleQueryBits; + } else { + // apply deletes when needed: + return new CombinedBitSet(roleQueryBits, actualLiveDocs); + } + } + + private static int computeNumDocs(LeafReader reader, BitSet roleQueryBits) { + final Bits liveDocs = reader.getLiveDocs(); + if (roleQueryBits == null) { + return 0; + } else if (liveDocs == null) { + // slow + return roleQueryBits.cardinality(); + } else { + // very slow, but necessary in order to be correct + int numDocs = 0; + DocIdSetIterator it = new BitSetIterator(roleQueryBits, 0L); // we don't use the cost + try { + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + if (liveDocs.get(doc)) { + numDocs++; + } + } + return numDocs; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + } + } + + private static class CreateScorerOnceWeight extends Weight { + + private final Weight weight; + private final Set seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>()); + + CreateScorerOnceWeight(Weight weight) { + super(weight.getQuery()); + this.weight = weight; + } + + @Override + public void extractTerms(Set terms) { + weight.extractTerms(terms); + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + return weight.explain(context, doc); + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); + return weight.scorer(context); + } + + @Override + public BulkScorer bulkScorer(LeafReaderContext context) + throws IOException { + assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); + return weight.bulkScorer(context); + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return true; + } + } + + private static class CreateScorerOnceQuery extends Query { + + private final Query query; + + CreateScorerOnceQuery(Query query) { + this.query = query; + } + + @Override + public String toString(String field) { + return query.toString(field); + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + Query queryRewritten = query.rewrite(reader); + if (query != queryRewritten) { + return new CreateScorerOnceQuery(queryRewritten); + } + return super.rewrite(reader); + } + + @Override + public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException { + return new CreateScorerOnceWeight(query.createWeight(searcher, scoreMode, boost)); + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj) && query.equals(((CreateScorerOnceQuery) obj).query); + } + + @Override + public int 
hashCode() { + return 31 * classHash() + query.hashCode(); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 275349e15cdf1..b29d3ba3b7dd4 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -44,7 +44,6 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.test.ESTestCase; @@ -81,8 +80,8 @@ public static void setup() throws IOException { } reader = w.getReader(); w.close(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), null); - searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); + searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); } @AfterClass @@ -159,10 +158,10 @@ public void testUseIndexStats() throws IOException { public void testApproximations() throws IOException { QueryProfiler profiler = new QueryProfiler(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), reader::close); // disable query caching since we want to test approximations, which won't // be exposed on a cached entry - ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); + ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(), + null, MAYBE_CACHE_POLICY); searcher.setProfiler(profiler); Query query = new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random()); searcher.count(query); diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 4b5be29205778..32607fa607ef6 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.shard; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexNotFoundException; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; @@ -278,8 +279,9 @@ protected IndexShard newShard(ShardId shardId, boolean primary, IndexingOperatio * (ready to recover from another shard) */ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData, - @Nullable IndexSearcherWrapper searcherWrapper) throws IOException { - return newShard(shardId, primary, nodeId, indexMetaData, searcherWrapper, () -> {}); + @Nullable CheckedFunction readerWrapper) + throws IOException { + return newShard(shardId, primary, nodeId, indexMetaData, readerWrapper, () -> {}); } /** @@ -291,11 +293,12 @@ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, I * (ready to recover from another shard) */ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData, - @Nullable 
IndexSearcherWrapper searcherWrapper, Runnable globalCheckpointSyncer) throws IOException { + @Nullable CheckedFunction readerWrapper, + Runnable globalCheckpointSyncer) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard( - shardRouting, indexMetaData, searcherWrapper, new InternalEngineFactory(), globalCheckpointSyncer, RetentionLeaseSyncer.EMPTY); + shardRouting, indexMetaData, readerWrapper, new InternalEngineFactory(), globalCheckpointSyncer, RetentionLeaseSyncer.EMPTY); } /** @@ -306,10 +309,11 @@ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, I * @param indexMetaData indexMetaData for the shard, including any mapping * @param listeners an optional set of listeners to add to the shard */ - protected IndexShard newShard( - ShardRouting routing, IndexMetaData indexMetaData, EngineFactory engineFactory, IndexingOperationListener... listeners) + protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, + @Nullable CheckedFunction indexReaderWrapper, + EngineFactory engineFactory, IndexingOperationListener... listeners) throws IOException { - return newShard(routing, indexMetaData, null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners); + return newShard(routing, indexMetaData, indexReaderWrapper, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners); } /** @@ -317,22 +321,20 @@ protected IndexShard newShard( * current node id the shard is assigned to. * @param routing shard routing to use * @param indexMetaData indexMetaData for the shard, including any mapping - * @param indexSearcherWrapper an optional wrapper to be used during searchers + * @param indexReaderWrapper an optional wrapper to be used during search * @param globalCheckpointSyncer callback for syncing global checkpoints * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, - @Nullable IndexSearcherWrapper indexSearcherWrapper, - @Nullable EngineFactory engineFactory, - Runnable globalCheckpointSyncer, - RetentionLeaseSyncer retentionLeaseSyncer, - IndexingOperationListener... listeners) + @Nullable CheckedFunction indexReaderWrapper, + @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer, + IndexingOperationListener... listeners) throws IOException { // add node id as name to settings for proper logging final ShardId shardId = routing.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); - return newShard(routing, shardPath, indexMetaData, null, indexSearcherWrapper, engineFactory, globalCheckpointSyncer, + return newShard(routing, shardPath, indexMetaData, null, indexReaderWrapper, engineFactory, globalCheckpointSyncer, retentionLeaseSyncer, EMPTY_EVENT_LISTENER, listeners); } @@ -342,14 +344,14 @@ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, * @param shardPath path to use for shard data * @param indexMetaData indexMetaData for the shard, including any mapping * @param storeProvider an optional custom store provider to use. 
If null a default file based store will be created - * @param indexSearcherWrapper an optional wrapper to be used during searchers + * @param indexReaderWrapper an optional wrapper to be used during search * @param globalCheckpointSyncer callback for syncing global checkpoints * @param indexEventListener index event listener * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData, @Nullable CheckedFunction storeProvider, - @Nullable IndexSearcherWrapper indexSearcherWrapper, + @Nullable CheckedFunction indexReaderWrapper, @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer, IndexEventListener indexEventListener, IndexingOperationListener... listeners) throws IOException { @@ -382,7 +384,7 @@ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMe similarityService, engineFactory, indexEventListener, - indexSearcherWrapper, + indexReaderWrapper, threadPool, BigArrays.NON_RECYCLING_INSTANCE, warmer, diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index efa9a7df4bad7..3bd3b6838a897 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -32,7 +32,6 @@ import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.XIndexSearcher; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.lease.Releasable; @@ -47,7 +46,6 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener; import org.elasticsearch.index.cache.query.DisabledQueryCache; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; @@ -240,7 +238,6 @@ protected A createAggregator(Query query, } protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) { - Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher, () -> indexSearcher.getIndexReader().close()); QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override @@ -253,7 +250,8 @@ public boolean shouldCache(Query query) { return false; } }; - ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(searcher, queryCache, queryCachingPolicy); + ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(indexSearcher.getIndexReader(), + indexSearcher.getSimilarity(), queryCache, queryCachingPolicy); SearchContext searchContext = mock(SearchContext.class); when(searchContext.numberOfShards()).thenReturn(1); @@ -464,16 +462,8 @@ protected static DirectoryReader wrap(DirectoryReader directoryReader) throws IO */ protected static IndexSearcher newIndexSearcher(IndexReader indexReader) { if (randomBoolean()) { - final IndexSearcher delegate = new IndexSearcher(indexReader); - final XIndexSearcher wrappedSearcher 
= new XIndexSearcher(delegate); // this executes basic query checks and asserts that weights are normalized only once etc. - return new AssertingIndexSearcher(random(), indexReader) { - @Override - protected void search(List leaves, Weight weight, Collector collector) throws IOException { - // we cannot use the asserting searcher because the weight is created by the ContextIndexSearcher - wrappedSearcher.search(leaves, weight, collector); - } - }; + return new AssertingIndexSearcher(random(), indexReader); } else { return new IndexSearcher(indexReader); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index b3a6fe8490895..52b086db338f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -24,14 +24,9 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.AssertingIndexSearcher; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Weight; -import org.apache.lucene.search.XIndexSearcher; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Setting; @@ -47,7 +42,6 @@ import java.io.IOException; import java.lang.reflect.Constructor; import java.util.IdentityHashMap; -import java.util.List; import java.util.Random; import java.util.concurrent.atomic.AtomicBoolean; @@ -151,19 +145,8 @@ public AssertingIndexSearcher newSearcher(Engine.Searcher searcher) throws Engin if (reader instanceof DirectoryReader && mockContext.wrapReader) { wrappedReader = wrapReader((DirectoryReader) reader); } - final IndexSearcher delegate = new IndexSearcher(wrappedReader); - delegate.setSimilarity(searcher.searcher().getSimilarity()); - delegate.setQueryCache(filterCache); - delegate.setQueryCachingPolicy(filterCachingPolicy); - final XIndexSearcher wrappedSearcher = new XIndexSearcher(delegate); // this executes basic query checks and asserts that weights are normalized only once etc. 
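// Note: the XIndexSearcher indirection being removed here (and in AggregatorTestCase above)
// existed only to reach IndexSearcher's protected search(List, Weight, Collector) hook,
// because the Weight was created by ContextIndexSearcher rather than by the asserting
// searcher itself ("we cannot use the asserting searcher because the weight is created by
// the ContextIndexSearcher"). Now that ContextIndexSearcher is constructed directly over
// the reader, a plain AssertingIndexSearcher, configured with the same similarity, query
// cache, and caching policy as the deleted delegate, is sufficient.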
- final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader) { - @Override - protected void search(List leaves, Weight weight, Collector collector) throws IOException { - // we cannot use the asserting searcher because the weight is created by the ContextIndexSearcher - wrappedSearcher.search(leaves, weight, collector); - } - }; + final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader); assertingIndexSearcher.setSimilarity(searcher.searcher().getSimilarity()); assertingIndexSearcher.setQueryCache(filterCache); assertingIndexSearcher.setQueryCachingPolicy(filterCachingPolicy); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java index 631bd0b9ef9d2..412d64803a015 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java @@ -272,8 +272,8 @@ public static class ReacquireEngineSearcherListener implements SearchOperationLi @Override public void validateSearchContext(SearchContext context, TransportRequest transportRequest) { - Searcher engineSearcher = context.searcher().getEngineSearcher(); - LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(engineSearcher.getDirectoryReader()); + DirectoryReader dirReader = context.searcher().getDirectoryReader(); + LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader); if (lazyDirectoryReader != null) { try { lazyDirectoryReader.reset(); @@ -297,8 +297,8 @@ private void registerRelease(SearchContext context, LazyDirectoryReader lazyDire @Override public void onNewContext(SearchContext context) { - Searcher engineSearcher = context.searcher().getEngineSearcher(); - LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(engineSearcher.getDirectoryReader()); + DirectoryReader dirReader = context.searcher().getDirectoryReader(); + LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader); if (lazyDirectoryReader != null) { registerRelease(context, lazyDirectoryReader); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java index c7d84b3c40fa9..af84315abf4eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java @@ -16,6 +16,7 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CombinedBitSet; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; @@ -172,18 +173,7 @@ public Bits getLiveDocs() { return roleQueryBits; } else { // apply deletes when needed: - return new Bits() { - - @Override - public boolean get(int index) { - return roleQueryBits.get(index) && actualLiveDocs.get(index); - } - - @Override - public int length() { - return roleQueryBits.length(); - } - }; + return new CombinedBitSet(roleQueryBits, actualLiveDocs); } } @@ -208,13 +198,4 @@ public CacheHelper 
getReaderCacheHelper() { // Not delegated since we change the live docs return null; } - - BitSet getRoleQueryBits() { - return roleQueryBits; - } - - Bits getWrappedLiveDocs() { - return in.getLiveDocs(); - } - } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java new file mode 100644 index 0000000000000..6ea8ae84e118d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.authz.accesscontrol; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; +import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.function.Function; + +/** + * An IndexReader wrapper implementation that is used for field and document level security. + *
<p> + * Based on the {@link ThreadContext} this class will enable field and/or document level security. + * <p> + * Field level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader} + * in the {@link #apply(DirectoryReader)} method. + * <p>
    + * Document level security is enabled by wrapping the original {@link DirectoryReader} in a {@link DocumentSubsetReader} + * instance. + */ +public class SecurityIndexReaderWrapper implements CheckedFunction { + private static final Logger logger = LogManager.getLogger(SecurityIndexReaderWrapper.class); + + private final Function queryShardContextProvider; + private final BitsetFilterCache bitsetFilterCache; + private final XPackLicenseState licenseState; + private final ThreadContext threadContext; + private final ScriptService scriptService; + + public SecurityIndexReaderWrapper(Function queryShardContextProvider, + BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState, + ScriptService scriptService) { + this.scriptService = scriptService; + this.queryShardContextProvider = queryShardContextProvider; + this.bitsetFilterCache = bitsetFilterCache; + this.threadContext = threadContext; + this.licenseState = licenseState; + } + + @Override + public DirectoryReader apply(final DirectoryReader reader) { + if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) { + return reader; + } + + try { + final IndicesAccessControl indicesAccessControl = getIndicesAccessControl(); + + ShardId shardId = ShardUtils.extractShardId(reader); + if (shardId == null) { + throw new IllegalStateException(LoggerMessageFormat.format("couldn't extract shardId from reader [{}]", reader)); + } + + final IndicesAccessControl.IndexAccessControl permissions = indicesAccessControl.getIndexPermissions(shardId.getIndexName()); + // No permissions have been defined for an index, so don't intercept the index reader for access control + if (permissions == null) { + return reader; + } + + DirectoryReader wrappedReader = reader; + DocumentPermissions documentPermissions = permissions.getDocumentPermissions(); + if (documentPermissions != null && documentPermissions.hasDocumentLevelPermissions()) { + BooleanQuery filterQuery = documentPermissions.filter(getUser(), scriptService, shardId, queryShardContextProvider); + if (filterQuery != null) { + wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetFilterCache, new ConstantScoreQuery(filterQuery)); + } + } + + return permissions.getFieldPermissions().filter(wrappedReader); + } catch (IOException e) { + logger.error("Unable to apply field level security"); + throw ExceptionsHelper.convertToElastic(e); + } + } + + protected IndicesAccessControl getIndicesAccessControl() { + IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + if (indicesAccessControl == null) { + throw Exceptions.authorizationError("no indices permissions found"); + } + return indicesAccessControl; + } + + protected User getUser(){ + Authentication authentication = Authentication.getAuthentication(threadContext); + return authentication.getUser(); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java deleted file mode 100644 index 6608e5a64c8e4..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.security.authz.accesscontrol; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; -import org.apache.lucene.search.CollectionTerminatedException; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.ConjunctionDISI; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.BitSetIterator; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.SparseFixedBitSet; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.logging.LoggerMessageFormat; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.engine.EngineException; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.IndexSearcherWrapper; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardUtils; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader; -import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; -import org.elasticsearch.xpack.core.security.support.Exceptions; -import org.elasticsearch.xpack.core.security.user.User; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.function.Function; - -/** - * An {@link IndexSearcherWrapper} implementation that is used for field and document level security. - *
<p> - * Based on the {@link ThreadContext} this class will enable field and/or document level security. - * <p> - * Field level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader} - * in the {@link #wrap(DirectoryReader)} method. - * <p>
    - * Document level security is enabled by wrapping the original {@link DirectoryReader} in a {@link DocumentSubsetReader} - * instance. - */ -public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper { - private static final Logger logger = LogManager.getLogger(SecurityIndexSearcherWrapper.class); - - private final Function queryShardContextProvider; - private final BitsetFilterCache bitsetFilterCache; - private final XPackLicenseState licenseState; - private final ThreadContext threadContext; - private final ScriptService scriptService; - - public SecurityIndexSearcherWrapper(Function queryShardContextProvider, - BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState, - ScriptService scriptService) { - this.scriptService = scriptService; - this.queryShardContextProvider = queryShardContextProvider; - this.bitsetFilterCache = bitsetFilterCache; - this.threadContext = threadContext; - this.licenseState = licenseState; - } - - @Override - protected DirectoryReader wrap(final DirectoryReader reader) { - if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) { - return reader; - } - - try { - final IndicesAccessControl indicesAccessControl = getIndicesAccessControl(); - - ShardId shardId = ShardUtils.extractShardId(reader); - if (shardId == null) { - throw new IllegalStateException(LoggerMessageFormat.format("couldn't extract shardId from reader [{}]", reader)); - } - - final IndicesAccessControl.IndexAccessControl permissions = indicesAccessControl.getIndexPermissions(shardId.getIndexName()); - // No permissions have been defined for an index, so don't intercept the index reader for access control - if (permissions == null) { - return reader; - } - - DirectoryReader wrappedReader = reader; - DocumentPermissions documentPermissions = permissions.getDocumentPermissions(); - if (documentPermissions != null && documentPermissions.hasDocumentLevelPermissions()) { - BooleanQuery filterQuery = documentPermissions.filter(getUser(), scriptService, shardId, queryShardContextProvider); - if (filterQuery != null) { - wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetFilterCache, new ConstantScoreQuery(filterQuery)); - } - } - - return permissions.getFieldPermissions().filter(wrappedReader); - } catch (IOException e) { - logger.error("Unable to apply field level security"); - throw ExceptionsHelper.convertToElastic(e); - } - } - - @Override - protected IndexSearcher wrap(IndexSearcher searcher) throws EngineException { - if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) { - return searcher; - } - - final DirectoryReader directoryReader = (DirectoryReader) searcher.getIndexReader(); - if (directoryReader instanceof DocumentSubsetDirectoryReader) { - // The reasons why we return a custom searcher: - // 1) in the case the role query is sparse then large part of the main query can be skipped - // 2) If the role query doesn't match with any docs in a segment, that a segment can be skipped - IndexSearcher searcherWrapper = new IndexSearcherWrapper((DocumentSubsetDirectoryReader) directoryReader); - searcherWrapper.setQueryCache(searcher.getQueryCache()); - searcherWrapper.setQueryCachingPolicy(searcher.getQueryCachingPolicy()); - searcherWrapper.setSimilarity(searcher.getSimilarity()); - return searcherWrapper; - } - return searcher; - } - - static class IndexSearcherWrapper extends IndexSearcher { - - IndexSearcherWrapper(DocumentSubsetDirectoryReader r) { - super(r); - } - - @Override - protected void 
search(List leaves, Weight weight, Collector collector) throws IOException { - for (LeafReaderContext ctx : leaves) { // search each subreader - final LeafCollector leafCollector; - try { - leafCollector = collector.getLeafCollector(ctx); - } catch (CollectionTerminatedException e) { - // there is no doc of interest in this reader context - // continue with the following leaf - continue; - } - // The reader is always of type DocumentSubsetReader when we get here: - DocumentSubsetReader reader = (DocumentSubsetReader) ctx.reader(); - - BitSet roleQueryBits = reader.getRoleQueryBits(); - if (roleQueryBits == null) { - // nothing matches with the role query, so skip this segment: - continue; - } - - // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing: - if (roleQueryBits instanceof SparseFixedBitSet) { - Scorer scorer = weight.scorer(ctx); - if (scorer != null) { - SparseFixedBitSet sparseFixedBitSet = (SparseFixedBitSet) roleQueryBits; - Bits realLiveDocs = reader.getWrappedLiveDocs(); - try { - intersectScorerAndRoleBits(scorer, sparseFixedBitSet, leafCollector, realLiveDocs); - } catch (CollectionTerminatedException e) { - // collection was terminated prematurely - // continue with the following leaf - } - } - } else { - BulkScorer bulkScorer = weight.bulkScorer(ctx); - if (bulkScorer != null) { - Bits liveDocs = reader.getLiveDocs(); - try { - bulkScorer.score(leafCollector, liveDocs); - } catch (CollectionTerminatedException e) { - // collection was terminated prematurely - // continue with the following leaf - } - } - } - } - } - } - - static void intersectScorerAndRoleBits(Scorer scorer, SparseFixedBitSet roleBits, LeafCollector collector, Bits acceptDocs) throws - IOException { - // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should - // be used first: - DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(new BitSetIterator(roleBits, - roleBits.approximateCardinality()), scorer.iterator())); - for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { - if (acceptDocs == null || acceptDocs.get(docId)) { - collector.collect(docId); - } - } - } - - protected IndicesAccessControl getIndicesAccessControl() { - IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - if (indicesAccessControl == null) { - throw Exceptions.authorizationError("no indices permissions found"); - } - return indicesAccessControl; - } - - protected User getUser(){ - Authentication authentication = Authentication.getAuthentication(threadContext); - return authentication.getUser(); - } - -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java index 948503b33478c..758cd16391c89 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java @@ -83,7 +83,7 @@ public void testSourceIncomplete() throws IOException { .primaryTerm(0, primaryTerm) .putMapping("_doc", "{\"_source\":{\"enabled\": false}}").build(); - IndexShard shard = newShard(shardRouting, metaData, new InternalEngineFactory()); + IndexShard shard = newShard(shardRouting, metaData, null, new 
InternalEngineFactory()); recoverShardFromStore(shard); for (int i = 0; i < 1; i++) { @@ -278,7 +278,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetaData mapping) throw .settings(settings) .primaryTerm(0, primaryTerm); metaData.putMapping(mapping); - IndexShard targetShard = newShard(targetShardRouting, metaData.build(), new InternalEngineFactory()); + IndexShard targetShard = newShard(targetShardRouting, metaData.build(), null, new InternalEngineFactory()); boolean success = false; try { recoverShardFromStore(targetShard); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java similarity index 93% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 33a7a0945b56e..0b188ff7075f9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.AbstractBuilderTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -62,7 +63,7 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase { +public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderTestCase { public void testDLS() throws Exception { ShardId shardId = new ShardId("_index", "_na_", 0); @@ -99,7 +100,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { }); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(s -> queryShardContext, + SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, bitsetFilterCache, threadContext, licenseState, scriptService) { @Override @@ -156,8 +157,9 @@ protected IndicesAccessControl getIndicesAccessControl() { ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i]))); doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class)); when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery); - DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader); - IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader)); + DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader); + IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader, + IndexSearcher.getDefaultSimilarity(), 
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()); int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); @@ -222,7 +224,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(s -> queryShardContext, + SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, bitsetFilterCache, threadContext, licenseState, scriptService) { @Override @@ -259,8 +261,9 @@ protected IndicesAccessControl getIndicesAccessControl() { iw.close(); DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); - DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader); - IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader)); + DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader); + IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader, + IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()); ScoreDoc[] hits = indexSearcher.search(new MatchAllDocsQuery(), 1000).scoreDocs; Set actualDocIds = new HashSet<>(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java new file mode 100644 index 0000000000000..0535c8aa4e211 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.authz.accesscontrol; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.MMapDirectory; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; +import org.junit.After; +import org.junit.Before; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SecurityIndexReaderWrapperUnitTests extends ESTestCase { + + private static final Set META_FIELDS; + static { + final Set metaFields = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields())); + metaFields.add(SourceFieldMapper.NAME); + metaFields.add(FieldNamesFieldMapper.NAME); + metaFields.add(SeqNoFieldMapper.NAME); + META_FIELDS = Collections.unmodifiableSet(metaFields); + } + + private ThreadContext threadContext; + private ScriptService scriptService; + private SecurityIndexReaderWrapper securityIndexReaderWrapper; + private ElasticsearchDirectoryReader esIn; + private XPackLicenseState licenseState; + + @Before + public void setup() throws Exception { + Index index = new Index("_index", "testUUID"); + scriptService = mock(ScriptService.class); + + ShardId shardId = new ShardId(index, 0); + licenseState = mock(XPackLicenseState.class); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); + threadContext = new ThreadContext(Settings.EMPTY); + IndexShard indexShard = mock(IndexShard.class); + when(indexShard.shardId()).thenReturn(shardId); + + Directory directory = new MMapDirectory(createTempDir()); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig()); + writer.close(); + + DirectoryReader in = DirectoryReader.open(directory); // unfortunately DirectoryReader isn't mock friendly + esIn = ElasticsearchDirectoryReader.wrap(in, shardId); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + esIn.close(); + } + + public void testDefaultMetaFields() throws Exception { + securityIndexReaderWrapper = + new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, scriptService) { + @Override + protected IndicesAccessControl getIndicesAccessControl() { + IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, + new FieldPermissions(fieldPermissionDef(new 
String[]{}, null)), DocumentPermissions.allowAll()); + return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); + } + }; + + FieldSubsetReader.FieldSubsetDirectoryReader result = + (FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexReaderWrapper.apply(esIn); + assertThat(result.getFilter().run("_uid"), is(true)); + assertThat(result.getFilter().run("_id"), is(true)); + assertThat(result.getFilter().run("_version"), is(true)); + assertThat(result.getFilter().run("_type"), is(true)); + assertThat(result.getFilter().run("_source"), is(true)); + assertThat(result.getFilter().run("_routing"), is(true)); + assertThat(result.getFilter().run("_timestamp"), is(true)); + assertThat(result.getFilter().run("_ttl"), is(true)); + assertThat(result.getFilter().run("_size"), is(true)); + assertThat(result.getFilter().run("_index"), is(true)); + assertThat(result.getFilter().run("_field_names"), is(true)); + assertThat(result.getFilter().run("_seq_no"), is(true)); + assertThat(result.getFilter().run("_some_random_meta_field"), is(true)); + assertThat(result.getFilter().run("some_random_regular_field"), is(false)); + } + + public void testWrapReaderWhenFeatureDisabled() throws Exception { + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); + securityIndexReaderWrapper = + new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, scriptService); + DirectoryReader reader = securityIndexReaderWrapper.apply(esIn); + assertThat(reader, sameInstance(esIn)); + } + + public void testWildcards() throws Exception { + Set expected = new HashSet<>(META_FIELDS); + expected.add("field1_a"); + expected.add("field1_b"); + expected.add("field1_c"); + assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"field1*"}, null)), expected, "field", "field2"); + } + + public void testDotNotion() throws Exception { + Set expected = new HashSet<>(META_FIELDS); + expected.add("foo.bar"); + assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.bar"}, null)), expected, "foo", "foo.baz", "bar.foo"); + + expected = new HashSet<>(META_FIELDS); + expected.add("foo.bar"); + assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.*"}, null)), expected, "foo", "bar"); + } + + private void assertResolved(FieldPermissions permissions, Set expected, String... fieldsToTest) { + for (String field : expected) { + assertThat(field, permissions.grantsAccessTo(field), is(true)); + } + for (String field : fieldsToTest) { + assertThat(field, permissions.grantsAccessTo(field), is(expected.contains(field))); + } + } + + public void testFieldPermissionsWithFieldExceptions() throws Exception { + securityIndexReaderWrapper = + new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, null); + String[] grantedFields = new String[]{}; + String[] deniedFields; + Set expected = new HashSet<>(META_FIELDS); + // Presence of fields in a role with an empty array implies access to no fields except the meta fields + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? 
null : new String[]{})), + expected, "foo", "bar"); + + // make sure meta fields cannot be denied access to + deniedFields = META_FIELDS.toArray(new String[0]); + assertResolved(new FieldPermissions(fieldPermissionDef(null, deniedFields)), + new HashSet<>(Arrays.asList("foo", "bar", "_some_plugin_meta_field"))); + + // check we can add all fields with * + grantedFields = new String[]{"*"}; + expected = new HashSet<>(META_FIELDS); + expected.add("foo"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected); + + // same with null + grantedFields = null; + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected); + + // check we remove only excluded fields + grantedFields = new String[]{"*"}; + deniedFields = new String[]{"xfield"}; + expected = new HashSet<>(META_FIELDS); + expected.add("foo"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield"); + + // same with null + grantedFields = null; + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield"); + + // some other checks + grantedFields = new String[]{"field*"}; + deniedFields = new String[]{"field1", "field2"}; + expected = new HashSet<>(META_FIELDS); + expected.add("field3"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2"); + + grantedFields = new String[]{"field1", "field2"}; + deniedFields = new String[]{"field2"}; + expected = new HashSet<>(META_FIELDS); + expected.add("field1"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2"); + + grantedFields = new String[]{"field*"}; + deniedFields = new String[]{"field2"}; + expected = new HashSet<>(META_FIELDS); + expected.add("field1"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field2"); + + deniedFields = new String[]{"field*"}; + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), + META_FIELDS, "field1", "field2"); + + // empty array for allowed fields always means no field is allowed + grantedFields = new String[]{}; + deniedFields = new String[]{}; + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), + META_FIELDS, "field1", "field2"); + + // make sure all field can be explicitly allowed + grantedFields = new String[]{"*"}; + deniedFields = randomBoolean() ? null : new String[]{}; + expected = new HashSet<>(META_FIELDS); + expected.add("field1"); + assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected); + } + + private static FieldPermissionsDefinition fieldPermissionDef(String[] granted, String[] denied) { + return new FieldPermissionsDefinition(granted, denied); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java deleted file mode 100644 index 3da3949bad967..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java +++ /dev/null @@ -1,561 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.security.authz.accesscontrol; - -import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.misc.SweetSpotSimilarity; -import org.apache.lucene.search.BulkScorer; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.Weight; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.SparseFixedBitSet; -import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.SeqNoFieldMapper; -import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.aggregations.LeafBucketCollector; -import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader; -import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.Set; - -import static java.util.Collections.singletonMap; -import static 
org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper.intersectScorerAndRoleBits; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { - - private static final Set META_FIELDS; - static { - final Set metaFields = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields())); - metaFields.add(SourceFieldMapper.NAME); - metaFields.add(FieldNamesFieldMapper.NAME); - metaFields.add(SeqNoFieldMapper.NAME); - META_FIELDS = Collections.unmodifiableSet(metaFields); - } - - private ThreadContext threadContext; - private ScriptService scriptService; - private SecurityIndexSearcherWrapper securityIndexSearcherWrapper; - private ElasticsearchDirectoryReader esIn; - private XPackLicenseState licenseState; - private IndexSettings indexSettings; - - @Before - public void setup() throws Exception { - Index index = new Index("_index", "testUUID"); - scriptService = mock(ScriptService.class); - indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY); - - ShardId shardId = new ShardId(index, 0); - licenseState = mock(XPackLicenseState.class); - when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - threadContext = new ThreadContext(Settings.EMPTY); - IndexShard indexShard = mock(IndexShard.class); - when(indexShard.shardId()).thenReturn(shardId); - - Directory directory = new MMapDirectory(createTempDir()); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig()); - writer.close(); - - DirectoryReader in = DirectoryReader.open(directory); // unfortunately DirectoryReader isn't mock friendly - esIn = ElasticsearchDirectoryReader.wrap(in, shardId); - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - esIn.close(); - } - - public void testDefaultMetaFields() throws Exception { - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService) { - @Override - protected IndicesAccessControl getIndicesAccessControl() { - IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, - new FieldPermissions(fieldPermissionDef(new String[]{}, null)), DocumentPermissions.allowAll()); - return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); - } - }; - - FieldSubsetReader.FieldSubsetDirectoryReader result = - (FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexSearcherWrapper.wrap(esIn); - assertThat(result.getFilter().run("_uid"), is(true)); - assertThat(result.getFilter().run("_id"), is(true)); - assertThat(result.getFilter().run("_version"), is(true)); - assertThat(result.getFilter().run("_type"), is(true)); - assertThat(result.getFilter().run("_source"), is(true)); - assertThat(result.getFilter().run("_routing"), is(true)); - assertThat(result.getFilter().run("_timestamp"), is(true)); - assertThat(result.getFilter().run("_ttl"), is(true)); - assertThat(result.getFilter().run("_size"), is(true)); - assertThat(result.getFilter().run("_index"), is(true)); - assertThat(result.getFilter().run("_field_names"), is(true)); - assertThat(result.getFilter().run("_seq_no"), is(true)); - 
assertThat(result.getFilter().run("_some_random_meta_field"), is(true)); - assertThat(result.getFilter().run("some_random_regular_field"), is(false)); - } - - public void testWrapReaderWhenFeatureDisabled() throws Exception { - when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); - DirectoryReader reader = securityIndexSearcherWrapper.wrap(esIn); - assertThat(reader, sameInstance(esIn)); - } - - public void testWrapSearcherWhenFeatureDisabled() throws Exception { - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); - IndexSearcher indexSearcher = new IndexSearcher(esIn); - IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher); - assertThat(result, sameInstance(indexSearcher)); - } - - public void testWildcards() throws Exception { - Set expected = new HashSet<>(META_FIELDS); - expected.add("field1_a"); - expected.add("field1_b"); - expected.add("field1_c"); - assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"field1*"}, null)), expected, "field", "field2"); - } - - public void testDotNotion() throws Exception { - Set expected = new HashSet<>(META_FIELDS); - expected.add("foo.bar"); - assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.bar"}, null)), expected, "foo", "foo.baz", "bar.foo"); - - expected = new HashSet<>(META_FIELDS); - expected.add("foo.bar"); - assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.*"}, null)), expected, "foo", "bar"); - } - - public void testDelegateSimilarity() throws Exception { - IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY); - BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() { - @Override - public void onCache(ShardId shardId, Accountable accountable) { - } - - @Override - public void onRemoval(ShardId shardId, Accountable accountable) { - - } - }); - DirectoryReader directoryReader = DocumentSubsetReader.wrap(esIn, bitsetFilterCache, new MatchAllDocsQuery()); - IndexSearcher indexSearcher = new IndexSearcher(directoryReader); - indexSearcher.setSimilarity(new SweetSpotSimilarity()); - indexSearcher.setQueryCachingPolicy(new QueryCachingPolicy() { - @Override - public void onUse(Query query) { - } - - @Override - public boolean shouldCache(Query query) { - return false; - } - }); - indexSearcher.setQueryCache((weight, policy) -> weight); - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); - IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher); - assertThat(result, not(sameInstance(indexSearcher))); - assertThat(result.getSimilarity(), sameInstance(indexSearcher.getSimilarity())); - assertThat(result.getQueryCachingPolicy(), sameInstance(indexSearcher.getQueryCachingPolicy())); - assertThat(result.getQueryCache(), sameInstance(indexSearcher.getQueryCache())); - bitsetFilterCache.close(); - } - - public void testIntersectScorerAndRoleBits() throws Exception { - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService); - final Directory directory = newDirectory(); - IndexWriter iw = new IndexWriter( - directory, - new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE) - ); - - 
Document document = new Document(); - document.add(new StringField("field1", "value1", Field.Store.NO)); - document.add(new StringField("field2", "value1", Field.Store.NO)); - iw.addDocument(document); - - document = new Document(); - document.add(new StringField("field1", "value2", Field.Store.NO)); - document.add(new StringField("field2", "value1", Field.Store.NO)); - iw.addDocument(document); - - document = new Document(); - document.add(new StringField("field1", "value3", Field.Store.NO)); - document.add(new StringField("field2", "value1", Field.Store.NO)); - iw.addDocument(document); - - document = new Document(); - document.add(new StringField("field1", "value4", Field.Store.NO)); - document.add(new StringField("field2", "value1", Field.Store.NO)); - iw.addDocument(document); - - iw.commit(); - iw.deleteDocuments(new Term("field1", "value3")); - iw.close(); - DirectoryReader directoryReader = DirectoryReader.open(directory); - IndexSearcher searcher = new IndexSearcher(directoryReader); - Weight weight = searcher.createWeight(new TermQuery(new Term("field2", "value1")), - org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f); - - LeafReaderContext leaf = directoryReader.leaves().get(0); - - SparseFixedBitSet sparseFixedBitSet = query(leaf, "field1", "value1"); - LeafCollector leafCollector = new LeafBucketCollector() { - @Override - public void collect(int doc, long bucket) throws IOException { - assertThat(doc, equalTo(0)); - } - }; - intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs()); - - sparseFixedBitSet = query(leaf, "field1", "value2"); - leafCollector = new LeafBucketCollector() { - @Override - public void collect(int doc, long bucket) throws IOException { - assertThat(doc, equalTo(1)); - } - }; - intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs()); - - - sparseFixedBitSet = query(leaf, "field1", "value3"); - leafCollector = new LeafBucketCollector() { - @Override - public void collect(int doc, long bucket) throws IOException { - fail("docId [" + doc + "] should have been deleted"); - } - }; - intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs()); - - sparseFixedBitSet = query(leaf, "field1", "value4"); - leafCollector = new LeafBucketCollector() { - @Override - public void collect(int doc, long bucket) throws IOException { - assertThat(doc, equalTo(3)); - } - }; - intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs()); - - directoryReader.close(); - directory.close(); - } - - private void assertResolved(FieldPermissions permissions, Set expected, String... fieldsToTest) { - for (String field : expected) { - assertThat(field, permissions.grantsAccessTo(field), is(true)); - } - for (String field : fieldsToTest) { - assertThat(field, permissions.grantsAccessTo(field), is(expected.contains(field))); - } - } - - public void testFieldPermissionsWithFieldExceptions() throws Exception { - securityIndexSearcherWrapper = - new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, null); - String[] grantedFields = new String[]{}; - String[] deniedFields; - Set expected = new HashSet<>(META_FIELDS); - // Presence of fields in a role with an empty array implies access to no fields except the meta fields - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? 
null : new String[]{})), - expected, "foo", "bar"); - - // make sure meta fields cannot be denied access to - deniedFields = META_FIELDS.toArray(new String[0]); - assertResolved(new FieldPermissions(fieldPermissionDef(null, deniedFields)), - new HashSet<>(Arrays.asList("foo", "bar", "_some_plugin_meta_field"))); - - // check we can add all fields with * - grantedFields = new String[]{"*"}; - expected = new HashSet<>(META_FIELDS); - expected.add("foo"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected); - - // same with null - grantedFields = null; - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected); - - // check we remove only excluded fields - grantedFields = new String[]{"*"}; - deniedFields = new String[]{"xfield"}; - expected = new HashSet<>(META_FIELDS); - expected.add("foo"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield"); - - // same with null - grantedFields = null; - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield"); - - // some other checks - grantedFields = new String[]{"field*"}; - deniedFields = new String[]{"field1", "field2"}; - expected = new HashSet<>(META_FIELDS); - expected.add("field3"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2"); - - grantedFields = new String[]{"field1", "field2"}; - deniedFields = new String[]{"field2"}; - expected = new HashSet<>(META_FIELDS); - expected.add("field1"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2"); - - grantedFields = new String[]{"field*"}; - deniedFields = new String[]{"field2"}; - expected = new HashSet<>(META_FIELDS); - expected.add("field1"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field2"); - - deniedFields = new String[]{"field*"}; - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), - META_FIELDS, "field1", "field2"); - - // empty array for allowed fields always means no field is allowed - grantedFields = new String[]{}; - deniedFields = new String[]{}; - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), - META_FIELDS, "field1", "field2"); - - // make sure all field can be explicitly allowed - grantedFields = new String[]{"*"}; - deniedFields = randomBoolean() ? 
null : new String[]{}; - expected = new HashSet<>(META_FIELDS); - expected.add("field1"); - assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected); - } - - private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException { - SparseFixedBitSet sparseFixedBitSet = new SparseFixedBitSet(leaf.reader().maxDoc()); - TermsEnum tenum = leaf.reader().terms(field).iterator(); - while (tenum.next().utf8ToString().equals(value) == false) { - } - PostingsEnum penum = tenum.postings(null); - sparseFixedBitSet.or(penum); - return sparseFixedBitSet; - } - - public void testIndexSearcherWrapperSparseNoDeletions() throws IOException { - doTestIndexSearcherWrapper(true, false); - } - - public void testIndexSearcherWrapperDenseNoDeletions() throws IOException { - doTestIndexSearcherWrapper(false, false); - } - - public void testIndexSearcherWrapperSparseWithDeletions() throws IOException { - doTestIndexSearcherWrapper(true, true); - } - - public void testIndexSearcherWrapperDenseWithDeletions() throws IOException { - doTestIndexSearcherWrapper(false, true); - } - - static class CreateScorerOnceWeight extends Weight { - - private final Weight weight; - private final Set seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>()); - - protected CreateScorerOnceWeight(Weight weight) { - super(weight.getQuery()); - this.weight = weight; - } - - @Override - public void extractTerms(Set terms) { - weight.extractTerms(terms); - } - - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return weight.explain(context, doc); - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) - throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return true; - } - } - - static class CreateScorerOnceQuery extends Query { - - private final Query query; - - CreateScorerOnceQuery(Query query) { - this.query = query; - } - - @Override - public String toString(String field) { - return query.toString(field); - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - Query queryRewritten = query.rewrite(reader); - if (query != queryRewritten) { - return new CreateScorerOnceQuery(queryRewritten); - } - return super.rewrite(reader); - } - - @Override - public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException { - return new CreateScorerOnceWeight(query.createWeight(searcher, scoreMode, boost)); - } - - @Override - public boolean equals(Object obj) { - return sameClassAs(obj) && query.equals(((CreateScorerOnceQuery) obj).query); - } - - @Override - public int hashCode() { - return 31 * classHash() + query.hashCode(); - } - } - - public void doTestIndexSearcherWrapper(boolean sparse, boolean deletions) throws IOException { - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null)); - Document doc = new Document(); - StringField allowedField = new StringField("allowed", "yes", Store.NO); - doc.add(allowedField); - StringField fooField = new StringField("foo", "bar", Store.NO); - doc.add(fooField); - StringField 
deleteField = new StringField("delete", "no", Store.NO); - doc.add(deleteField); - w.addDocument(doc); - if (deletions) { - // add a document that matches foo:bar but will be deleted - deleteField.setStringValue("yes"); - w.addDocument(doc); - deleteField.setStringValue("no"); - } - allowedField.setStringValue("no"); - w.addDocument(doc); - if (sparse) { - for (int i = 0; i < 1000; ++i) { - w.addDocument(doc); - } - w.forceMerge(1); - } - w.deleteDocuments(new Term("delete", "yes")); - - IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY); - BitsetFilterCache.Listener listener = new BitsetFilterCache.Listener() { - @Override - public void onCache(ShardId shardId, Accountable accountable) { - - } - - @Override - public void onRemoval(ShardId shardId, Accountable accountable) { - - } - }; - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(w), new ShardId(indexSettings.getIndex(), 0)); - BitsetFilterCache cache = new BitsetFilterCache(settings, listener); - Query roleQuery = new TermQuery(new Term("allowed", "yes")); - BitSet bitSet = cache.getBitSetProducer(roleQuery).getBitSet(reader.leaves().get(0)); - if (sparse) { - assertThat(bitSet, instanceOf(SparseFixedBitSet.class)); - } else { - assertThat(bitSet, instanceOf(FixedBitSet.class)); - } - - DocumentSubsetDirectoryReader filteredReader = DocumentSubsetReader.wrap(reader, cache, roleQuery); - IndexSearcher wrapSearcher = new SecurityIndexSearcherWrapper.IndexSearcherWrapper(filteredReader); - Engine.Searcher engineSearcher = new Engine.Searcher("test", wrapSearcher, () -> {}); - ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, - wrapSearcher.getQueryCache(), wrapSearcher.getQueryCachingPolicy()); - searcher.setCheckCancelled(() -> {}); - - // Searching a non-existing term will trigger a null scorer - assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value")))); - - assertEquals(1, searcher.count(new TermQuery(new Term("foo", "bar")))); - - // make sure scorers are created only once, see #1725 - assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); - IOUtils.close(reader, w, dir); - } - - private static FieldPermissionsDefinition fieldPermissionDef(String[] granted, String[] denied) { - return new FieldPermissionsDefinition(granted, denied); - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 56797baff6889..1f4f87e858176 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -119,7 +119,7 @@ import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexReaderWrapper; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; @@ -641,8 +641,8 @@ public void onIndexModule(IndexModule 
module) { if (enabled) { assert getLicenseState() != null; if (XPackSettings.DLS_FLS_ENABLED.get(settings)) { - module.setSearcherWrapper(indexService -> - new SecurityIndexSearcherWrapper( + module.setReaderWrapper(indexService -> + new SecurityIndexReaderWrapper( shardId -> indexService.newQueryShardContext(shardId.id(), // we pass a null index reader, which is legal and will disable rewrite optimizations // based on index statistics, which is probably safer... From 88c9ecb0b06c0c1951900e162687fdd8c66bd972 Mon Sep 17 00:00:00 2001 From: weizijun Date: Fri, 28 Jun 2019 21:47:24 +0800 Subject: [PATCH 063/140] Fix threshold spelling errors (#43326) Substitutes treshold by threshold --- .../decider/DiskThresholdDecider.java | 6 ++--- .../allocation/BalanceConfigurationTests.java | 22 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 1f048fca76c09..0838999c4f367 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -131,12 +131,12 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing // flag that determines whether the low threshold checks below can be skipped. We use this for a primary shard that is freshly // allocated and empty. - boolean skipLowTresholdChecks = shardRouting.primary() && + boolean skipLowThresholdChecks = shardRouting.primary() && shardRouting.active() == false && shardRouting.recoverySource().getType() == RecoverySource.Type.EMPTY_STORE; // checks for exact byte comparisons if (freeBytes < diskThresholdSettings.getFreeBytesThresholdLow().getBytes()) { - if (skipLowTresholdChecks == false) { + if (skipLowThresholdChecks == false) { if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} free) on node {}, preventing allocation", diskThresholdSettings.getFreeBytesThresholdLow(), freeBytesValue, node.nodeId()); @@ -178,7 +178,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing // checks for percentage comparisons if (freeDiskPercentage < diskThresholdSettings.getFreeDiskThresholdLow()) { // If the shard is a replica or is a non-empty primary, check the low threshold - if (skipLowTresholdChecks == false) { + if (skipLowThresholdChecks == false) { if (logger.isDebugEnabled()) { logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation", Strings.format1Decimals(usedDiskThresholdLow, "%"), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 10fa01c2a2b12..ba18bc0f6d42d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -64,28 +64,28 @@ public void testIndexBalance() { /* Tests balance over indices only */ final float indexBalance = 1.0f; final float replicaBalance = 0.0f; - final float balanceTreshold = 1.0f; + final float balanceThreshold = 1.0f; Settings.Builder settings = Settings.builder(); 
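        // Illustrative aside (not part of the original test): with these factors the
        // BalancedShardsAllocator weighs a node for an index roughly as
        //
        //     weight(node, index) = indexBalance * (node.numShards(index) - avgShardsPerNode(index))
        //                         + replicaBalance * (node.numShards() - avgShardsPerNode)
        //
        // so indexBalance = 1.0 and replicaBalance = 0.0 make the per-index shard count
        // the only balancing criterion, and relocations are triggered only when they
        // improve this weight by more than balanceThreshold. This is a sketch of the
        // documented weight function, not the exact implementation.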
settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()); settings.put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), indexBalance); settings.put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), replicaBalance); - settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceTreshold); + settings.put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), balanceThreshold); AllocationService strategy = createAllocationService(settings.build(), new TestGatewayAllocator()); ClusterState clusterState = initCluster(strategy); assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(), numberOfNodes, numberOfIndices, - numberOfReplicas, numberOfShards, balanceTreshold); + numberOfReplicas, numberOfShards, balanceThreshold); clusterState = addNode(clusterState, strategy); assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(), numberOfNodes + 1, - numberOfIndices, numberOfReplicas, numberOfShards, balanceTreshold); + numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold); clusterState = removeNodes(clusterState, strategy); assertIndexBalance(clusterState.getRoutingTable(), clusterState.getRoutingNodes(), - (numberOfNodes + 1) - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceTreshold); + (numberOfNodes + 1) - (numberOfNodes + 1) / 2, numberOfIndices, numberOfReplicas, numberOfShards, balanceThreshold); } public void testReplicaBalance() { @@ -201,7 +201,7 @@ private ClusterState removeNodes(ClusterState clusterState, AllocationService st private void assertReplicaBalance(RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas, - int numberOfShards, float treshold) { + int numberOfShards, float threshold) { final int unassigned = nodes.unassigned().size(); if (unassigned > 0) { @@ -220,8 +220,8 @@ private void assertReplicaBalance(RoutingNodes nodes, int numberOfNodes, int num final int numShards = numberOfIndices * numberOfShards * (numberOfReplicas + 1) - unassigned; final float avgNumShards = (float) (numShards) / (float) (numberOfNodes); - final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - treshold))); - final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + treshold))); + final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - threshold))); + final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + threshold))); for (RoutingNode node : nodes) { assertThat(node.shardsWithState(STARTED).size(), Matchers.greaterThanOrEqualTo(minAvgNumberOfShards)); @@ -230,12 +230,12 @@ private void assertReplicaBalance(RoutingNodes nodes, int numberOfNodes, int num } private void assertIndexBalance(RoutingTable routingTable, RoutingNodes nodes, int numberOfNodes, int numberOfIndices, - int numberOfReplicas, int numberOfShards, float treshold) { + int numberOfReplicas, int numberOfShards, float threshold) { final int numShards = numberOfShards * (numberOfReplicas + 1); final float avgNumShards = (float) (numShards) / (float) (numberOfNodes); - final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - treshold))); - final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + treshold))); + final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - threshold))); + 
final int maxAvgNumberOfShards = Math.round(Math.round(Math.ceil(avgNumShards + threshold)));
 
         for (ObjectCursor<String> index : routingTable.indicesRouting().keys()) {
             for (RoutingNode node : nodes) {

From c900795df87b9e196fa1339cc6ac223e602e1d00 Mon Sep 17 00:00:00 2001
From: Tanguy Leroux
Date: Fri, 28 Jun 2019 16:18:54 +0200
Subject: [PATCH 064/140] Trim translog for closed indices (#43156)

Today when an index is closed all its shards are force-flushed but the
translog files are left around. As explained in #42445 we'd like to trim
the translog for closed indices in order to consume less disk space.

This commit reuses the existing AsyncTrimTranslogTask and re-enables it
for closed indices. At the time the task is executed, we should have the
guarantee that nothing holds the translog files that are going to be
removed. It also leaves a short period of time (10 min) during which
translog files of a recently closed index are still present on disk. This
could also help in some cases where the closed index is reopened shortly
after being closed (in order to update an index setting, for example).

Relates to #42445
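As an illustration, the core of the change (a condensed sketch of the
NoOpEngine change below, using only names that appear in this patch) is to
pin a translog deletion policy to the generation referenced by the last
commit and let the translog delete everything older:

    // retain nothing but the translog generation referenced by the last commit
    final TranslogDeletionPolicy policy = new TranslogDeletionPolicy(-1, -1);
    policy.setTranslogGenerationOfLastCommit(lastCommitGeneration);
    policy.setMinTranslogGenerationForRecovery(lastCommitGeneration);
    // reopening the translog under this policy drops the unreferenced generations
    try (Translog translog = new Translog(translogConfig, translogUuid, policy,
            engineConfig.getGlobalCheckpointSupplier(), engineConfig.getPrimaryTermSupplier(), seqNo -> {})) {
        translog.trimUnreferencedReaders();
    }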
---
 .../org/elasticsearch/index/IndexService.java |  9 ++-
 .../index/engine/NoOpEngine.java              | 58 ++++++++++++++++++-
 .../index/IndexServiceTests.java              | 47 ++++++++++++++-
 .../index/engine/NoOpEngineTests.java         | 43 +++++++++++++-
 4 files changed, 152 insertions(+), 5 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java
index c090b2b0c9837..c6c5a7b90106a 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexService.java
@@ -940,6 +940,11 @@ final class AsyncTrimTranslogTask extends BaseAsyncTask {
                 .getSettings().getAsTime(INDEX_TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING, TimeValue.timeValueMinutes(10)));
         }
 
+        @Override
+        protected boolean mustReschedule() {
+            return indexService.closed.get() == false;
+        }
+
         @Override
         protected void runInternal() {
             indexService.maybeTrimTranslog();
@@ -1031,8 +1036,8 @@ AsyncTranslogFSync getFsyncTask() { // for tests
         return fsyncTask;
     }
 
-    AsyncGlobalCheckpointTask getGlobalCheckpointTask() {
-        return globalCheckpointTask;
+    AsyncTrimTranslogTask getTrimTranslogTask() { // for tests
+        return trimTranslogTask;
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java b/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java
index 7f474d1be24c7..007a13351dfd9 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/NoOpEngine.java
@@ -27,16 +27,24 @@
 import org.apache.lucene.index.SegmentReader;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.util.concurrent.ReleasableLock;
+import org.elasticsearch.index.store.Store;
+import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.index.translog.TranslogConfig;
+import org.elasticsearch.index.translog.TranslogDeletionPolicy;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.util.List;
+import java.util.Map;
 import java.util.function.Function;
 
 /**
  * NoOpEngine is an engine implementation that does nothing but the bare minimum
  * required in order to have an engine. All attempts to do something (search,
- * index, get), throw {@link UnsupportedOperationException}.
+ * index, get), throw {@link UnsupportedOperationException}. However, NoOpEngine
+ * allows trimming any existing translog files through the
+ * {@link #trimUnreferencedTranslogFiles()} method.
  */
 public final class NoOpEngine extends ReadOnlyEngine {
@@ -116,4 +124,52 @@ public SegmentsStats segmentsStats(boolean includeSegmentFileSizes, boolean incl
             return super.segmentsStats(includeSegmentFileSizes, includeUnloadedSegments);
         }
     }
+
+    /**
+     * This implementation will trim existing translog files using a {@link TranslogDeletionPolicy}
+     * that retains nothing but the last translog generation from safe commit.
+     */
+    @Override
+    public void trimUnreferencedTranslogFiles() {
+        final Store store = this.engineConfig.getStore();
+        store.incRef();
+        try (ReleasableLock lock = readLock.acquire()) {
+            ensureOpen();
+            final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
+            if (commits.size() == 1) {
+                final Map<String, String> commitUserData = getLastCommittedSegmentInfos().getUserData();
+                final String translogUuid = commitUserData.get(Translog.TRANSLOG_UUID_KEY);
+                if (translogUuid == null) {
+                    throw new IllegalStateException("commit doesn't contain translog unique id");
+                }
+                if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY) == false) {
+                    throw new IllegalStateException("commit doesn't contain translog generation id");
+                }
+                final long lastCommitGeneration = Long.parseLong(commitUserData.get(Translog.TRANSLOG_GENERATION_KEY));
+                final TranslogConfig translogConfig = engineConfig.getTranslogConfig();
+                final long minTranslogGeneration = Translog.readMinTranslogGeneration(translogConfig.getTranslogPath(), translogUuid);
+
+                if (minTranslogGeneration < lastCommitGeneration) {
+                    // a translog deletion policy that retains nothing but the last translog generation from safe commit
+                    final TranslogDeletionPolicy translogDeletionPolicy = new TranslogDeletionPolicy(-1, -1);
+                    translogDeletionPolicy.setTranslogGenerationOfLastCommit(lastCommitGeneration);
+                    translogDeletionPolicy.setMinTranslogGenerationForRecovery(lastCommitGeneration);
+
+                    try (Translog translog = new Translog(translogConfig, translogUuid, translogDeletionPolicy,
+                        engineConfig.getGlobalCheckpointSupplier(), engineConfig.getPrimaryTermSupplier(), seqNo -> {})) {
+                        translog.trimUnreferencedReaders();
+                    }
+                }
+            }
+        } catch (final Exception e) {
+            try {
+                failEngine("translog trimming failed", e);
+            } catch (Exception inner) {
+                e.addSuppressed(inner);
+            }
+            throw new EngineException(shardId, "failed to trim translog", e);
+        } finally {
+            store.decRef();
+        }
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java
index 2d4030a51ce3d..af4d621448ad7 100644
--- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java
@@ -42,12 +42,15 @@
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
+import java.nio.file.Path;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import static org.elasticsearch.index.shard.IndexShardTestCase.getEngine;
 import static org.elasticsearch.test.InternalSettingsPlugin.TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import
static org.hamcrest.core.IsEqual.equalTo; @@ -370,7 +373,7 @@ public void testAsyncTranslogTrimActuallyWorks() throws Exception { .build(); IndexService indexService = createIndex("test", settings); ensureGreen("test"); - assertTrue(indexService.getRefreshTask().mustReschedule()); + assertTrue(indexService.getTrimTranslogTask().mustReschedule()); client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").get(); client().admin().indices().prepareUpdateSettings("test") @@ -382,6 +385,48 @@ public void testAsyncTranslogTrimActuallyWorks() throws Exception { assertBusy(() -> assertThat(IndexShardTestCase.getTranslog(shard).totalOperations(), equalTo(0))); } + public void testAsyncTranslogTrimTaskOnClosedIndex() throws Exception { + final String indexName = "test"; + IndexService indexService = createIndex(indexName, Settings.builder() + .put(TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING.getKey(), "100ms") + .build()); + + Translog translog = IndexShardTestCase.getTranslog(indexService.getShard(0)); + final Path translogPath = translog.getConfig().getTranslogPath(); + final String translogUuid = translog.getTranslogUUID(); + + final int numDocs = scaledRandomIntBetween(10, 100); + for (int i = 0; i < numDocs; i++) { + client().prepareIndex().setIndex(indexName).setId(String.valueOf(i)).setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + if (randomBoolean()) { + client().admin().indices().prepareFlush(indexName).get(); + } + } + assertThat(translog.totalOperations(), equalTo(numDocs)); + assertThat(translog.stats().estimatedNumberOfOperations(), equalTo(numDocs)); + assertAcked(client().admin().indices().prepareClose("test")); + + indexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(indexService.index()); + assertTrue(indexService.getTrimTranslogTask().mustReschedule()); + + final long lastCommitedTranslogGeneration; + try (Engine.IndexCommitRef indexCommitRef = getEngine(indexService.getShard(0)).acquireLastIndexCommit(false)) { + Map lastCommittedUserData = indexCommitRef.getIndexCommit().getUserData(); + lastCommitedTranslogGeneration = Long.parseLong(lastCommittedUserData.get(Translog.TRANSLOG_GENERATION_KEY)); + } + assertBusy(() -> { + long minTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUuid); + assertThat(minTranslogGen, equalTo(lastCommitedTranslogGeneration)); + }); + + assertAcked(client().admin().indices().prepareOpen("test")); + + indexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(indexService.index()); + translog = IndexShardTestCase.getTranslog(indexService.getShard(0)); + assertThat(translog.totalOperations(), equalTo(0)); + assertThat(translog.stats().estimatedNumberOfOperations(), equalTo(0)); + } + public void testIllegalFsyncInterval() { Settings settings = Settings.builder() .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "0ms") // disable diff --git a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java index 6f74ac23a8e85..f45eab0e05778 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.Store; +import 
org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.test.IndexSettingsModule; @@ -42,6 +43,7 @@ import java.io.UncheckedIOException; import java.nio.file.Path; import java.util.Collections; +import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import static org.hamcrest.Matchers.equalTo; @@ -83,7 +85,7 @@ public void testNoopAfterRegularEngine() throws IOException { tracker.updateLocalCheckpoint(allocationId.getId(), i); } - flushAndTrimTranslog(engine); + engine.flush(true, true); long localCheckpoint = engine.getPersistedLocalCheckpoint(); long maxSeqNo = engine.getSeqNoStats(100L).getMaxSeqNo(); @@ -159,6 +161,45 @@ public void testNoOpEngineStats() throws Exception { } } + public void testTrimUnreferencedTranslogFiles() throws Exception { + final ReplicationTracker tracker = (ReplicationTracker) engine.config().getGlobalCheckpointSupplier(); + ShardRouting routing = TestShardRouting.newShardRouting("test", shardId.id(), "node", + null, true, ShardRoutingState.STARTED, allocationId); + IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(shardId).addShard(routing).build(); + tracker.updateFromMaster(1L, Collections.singleton(allocationId.getId()), table); + tracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); + + final int numDocs = scaledRandomIntBetween(10, 3000); + for (int i = 0; i < numDocs; i++) { + engine.index(indexForDoc(createParsedDoc(Integer.toString(i), null))); + if (rarely()) { + engine.flush(); + } + tracker.updateLocalCheckpoint(allocationId.getId(), i); + } + engine.flush(true, true); + + final String translogUuid = engine.getTranslog().getTranslogUUID(); + final long minFileGeneration = engine.getTranslog().getMinFileGeneration(); + final long currentFileGeneration = engine.getTranslog().currentFileGeneration(); + engine.close(); + + final NoOpEngine noOpEngine = new NoOpEngine(noOpConfig(INDEX_SETTINGS, store, primaryTranslogDir, tracker)); + final Path translogPath = noOpEngine.config().getTranslogConfig().getTranslogPath(); + + final long lastCommitedTranslogGeneration; + try (Engine.IndexCommitRef indexCommitRef = noOpEngine.acquireLastIndexCommit(false)) { + Map lastCommittedUserData = indexCommitRef.getIndexCommit().getUserData(); + lastCommitedTranslogGeneration = Long.parseLong(lastCommittedUserData.get(Translog.TRANSLOG_GENERATION_KEY)); + assertThat(lastCommitedTranslogGeneration, equalTo(currentFileGeneration)); + } + + assertThat(Translog.readMinTranslogGeneration(translogPath, translogUuid), equalTo(minFileGeneration)); + noOpEngine.trimUnreferencedTranslogFiles(); + assertThat(Translog.readMinTranslogGeneration(translogPath, translogUuid), equalTo(lastCommitedTranslogGeneration)); + noOpEngine.close(); + } + private void flushAndTrimTranslog(final InternalEngine engine) { engine.flush(true, true); final TranslogDeletionPolicy deletionPolicy = engine.getTranslog().getDeletionPolicy(); From 1f3a45cfac90b938388c821be39639982832af9f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 28 Jun 2019 08:45:57 -0700 Subject: [PATCH 065/140] Convert some action.execute cases to using NodeClient (#43715) There are several cases where execute is called on actions directly, mostly within other action implementations. Now that transport client is gone, these internal actions can be registered just like normal actions, and called with the NodeClient to execute locally on the same node. 
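As a sketch, the pattern (condensed from the MultiGet changes below;
illustration only, using the names this patch introduces) looks like:

    // register the formerly-internal shard-level action like any other action ...
    actions.register(TransportShardMultiGetAction.ACTION_INSTANCE, TransportShardMultiGetAction.class);

    // ... and invoke it locally through the NodeClient instead of calling shardAction.execute(...)
    client.executeLocally(TransportShardMultiGetAction.ACTION_INSTANCE, shardRequest, listener);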
This commit converts a few of these cases for shard level actions to use this approach. --- .../org/elasticsearch/action/Action2.java | 47 +++++++++++++++++++ .../elasticsearch/action/ActionModule.java | 28 +++++------ .../get/TransportGetFieldMappingsAction.java | 12 +++-- .../TransportGetFieldMappingsIndexAction.java | 3 ++ .../action/bulk/TransportBulkAction.java | 12 ++--- .../action/bulk/TransportShardBulkAction.java | 7 +++ .../TransportFieldCapabilitiesAction.java | 15 +++--- ...TransportFieldCapabilitiesIndexAction.java | 4 ++ .../action/get/TransportMultiGetAction.java | 9 ++-- .../get/TransportShardMultiGetAction.java | 3 ++ .../TransportMultiTermVectorsAction.java | 9 ++-- .../TransportShardMultiTermsVectorAction.java | 7 +++ .../elasticsearch/plugins/ActionPlugin.java | 25 ++-------- ...ActionIndicesThatCannotBeCreatedTests.java | 2 +- .../bulk/TransportBulkActionIngestTests.java | 2 +- .../action/bulk/TransportBulkActionTests.java | 2 +- .../bulk/TransportBulkActionTookTests.java | 4 -- .../get/TransportMultiGetActionTests.java | 4 +- .../TransportMultiTermVectorsActionTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 9 ++-- 20 files changed, 129 insertions(+), 79 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/Action2.java diff --git a/server/src/main/java/org/elasticsearch/action/Action2.java b/server/src/main/java/org/elasticsearch/action/Action2.java new file mode 100644 index 0000000000000..791e0a2e42274 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/Action2.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + + +import org.elasticsearch.common.io.stream.Writeable; + +/** + * An action for which the response class implements {@link org.elasticsearch.common.io.stream.Writeable}. 
+ */ +public class Action2 extends Action { + private final Writeable.Reader responseReader; + + public Action2(String name, Writeable.Reader responseReader) { + super(name); + this.responseReader = responseReader; + } + + @Override + public Response newResponse() { + throw new UnsupportedOperationException(); + } + + /** + * Get a reader that can create a new instance of the class from a {@link org.elasticsearch.common.io.stream.StreamInput} + */ + public Writeable.Reader getResponseReader() { + return responseReader; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 746ffd29213cc..401f777cdb7cd 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -412,9 +412,8 @@ public void register(ActionHandler handler) { } public void register( - Action action, Class> transportAction, - Class... supportTransportActions) { - register(new ActionHandler<>(action, transportAction, supportTransportActions)); + Action action, Class> transportAction) { + register(new ActionHandler<>(action, transportAction)); } } ActionRegistry actions = new ActionRegistry(); @@ -460,8 +459,8 @@ public void reg actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class); actions.register(CloseIndexAction.INSTANCE, TransportCloseIndexAction.class); actions.register(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class); - actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class, - TransportGetFieldMappingsIndexAction.class); + actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class); + actions.register(TransportGetFieldMappingsIndexAction.ACTION_INSTANCE, TransportGetFieldMappingsIndexAction.class); actions.register(PutMappingAction.INSTANCE, TransportPutMappingAction.class); actions.register(IndicesAliasesAction.INSTANCE, TransportIndicesAliasesAction.class); actions.register(UpdateSettingsAction.INSTANCE, TransportUpdateSettingsAction.class); @@ -484,14 +483,14 @@ public void reg actions.register(IndexAction.INSTANCE, TransportIndexAction.class); actions.register(GetAction.INSTANCE, TransportGetAction.class); actions.register(TermVectorsAction.INSTANCE, TransportTermVectorsAction.class); - actions.register(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class, - TransportShardMultiTermsVectorAction.class); + actions.register(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class); + actions.register(TransportShardMultiTermsVectorAction.ACTION_INSTANCE, TransportShardMultiTermsVectorAction.class); actions.register(DeleteAction.INSTANCE, TransportDeleteAction.class); actions.register(UpdateAction.INSTANCE, TransportUpdateAction.class); - actions.register(MultiGetAction.INSTANCE, TransportMultiGetAction.class, - TransportShardMultiGetAction.class); - actions.register(BulkAction.INSTANCE, TransportBulkAction.class, - TransportShardBulkAction.class); + actions.register(MultiGetAction.INSTANCE, TransportMultiGetAction.class); + actions.register(TransportShardMultiGetAction.ACTION_INSTANCE, TransportShardMultiGetAction.class); + actions.register(BulkAction.INSTANCE, TransportBulkAction.class); + actions.register(TransportShardBulkAction.ACTION_INSTANCE, TransportShardBulkAction.class); actions.register(SearchAction.INSTANCE, TransportSearchAction.class); actions.register(SearchScrollAction.INSTANCE, 
TransportSearchScrollAction.class); actions.register(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class); @@ -505,8 +504,8 @@ public void reg actions.register(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class); actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class); - actions.register(FieldCapabilitiesAction.INSTANCE, TransportFieldCapabilitiesAction.class, - TransportFieldCapabilitiesIndexAction.class); + actions.register(FieldCapabilitiesAction.INSTANCE, TransportFieldCapabilitiesAction.class); + actions.register(TransportFieldCapabilitiesIndexAction.ACTION_INSTANCE, TransportFieldCapabilitiesIndexAction.class); actions.register(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); actions.register(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); @@ -693,9 +692,6 @@ protected void configure() { // bind the action as eager singleton, so the map binder one will reuse it bind(action.getTransportAction()).asEagerSingleton(); transportActionsBinder.addBinding(action.getAction()).to(action.getTransportAction()).asEagerSingleton(); - for (Class supportAction : action.getSupportTransportActions()) { - bind(supportAction).asEagerSingleton(); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index 2ebbf81f693ca..fb5ca53e5dbf7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -39,17 +40,17 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportGetFieldMappingsIndexAction shardAction; private final IndexNameExpressionResolver indexNameExpressionResolver; + private final NodeClient client; @Inject public TransportGetFieldMappingsAction(TransportService transportService, ClusterService clusterService, - TransportGetFieldMappingsIndexAction shardAction, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + NodeClient client) { super(GetFieldMappingsAction.NAME, transportService, actionFilters, GetFieldMappingsRequest::new); this.clusterService = clusterService; - this.shardAction = shardAction; this.indexNameExpressionResolver = indexNameExpressionResolver; + this.client = client; } @Override @@ -66,7 +67,8 @@ protected void doExecute(Task task, GetFieldMappingsRequest request, final Actio boolean probablySingleFieldRequest = concreteIndices.length == 1 && request.types().length == 1 && request.fields().length == 1; for (final String index : concreteIndices) { GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index, probablySingleFieldRequest); - shardAction.execute(shardRequest, new 
ActionListener() { + + client.executeLocally(TransportGetFieldMappingsIndexAction.ACTION_INSTANCE, shardRequest, new ActionListener<>() { @Override public void onResponse(GetFieldMappingsResponse result) { indexResponses.set(indexCounter.getAndIncrement(), result); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 61a598c361cc9..1272846c70c26 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -21,6 +21,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.Action2; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -63,6 +65,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAction { private static final String ACTION_NAME = GetFieldMappingsAction.NAME + "[index]"; + public static final Action ACTION_INSTANCE = new Action2<>(ACTION_NAME, GetFieldMappingsResponse::new); protected final ClusterService clusterService; private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 6a229667745d3..faf5834234ceb 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -96,7 +96,6 @@ public class TransportBulkAction extends HandledTransportAction) BulkRequest::new, ThreadPool.Names.WRITE); Objects.requireNonNull(relativeTimeProvider); this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; - this.shardBulkAction = shardBulkAction; this.autoCreateIndex = autoCreateIndex; this.relativeTimeProvider = relativeTimeProvider; this.ingestForwarder = new IngestActionForwarder(transportService); @@ -440,7 +436,7 @@ protected void doRun() throws Exception { if (task != null) { bulkShardRequest.setParentTask(nodeId, task.getId()); } - shardBulkAction.execute(bulkShardRequest, new ActionListener() { + client.executeLocally(TransportShardBulkAction.ACTION_INSTANCE, bulkShardRequest, new ActionListener<>() { @Override public void onResponse(BulkShardResponse bulkShardResponse) { for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 570ee50a841c1..771010d673d67 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -24,6 +24,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.MessageSupplier; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.Action; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; @@ -77,6 +78,12 @@ public class TransportShardBulkAction extends TransportWriteAction { public static final String ACTION_NAME = BulkAction.NAME + "[s]"; + public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME) { + @Override + public BulkShardResponse newResponse() { + return new BulkShardResponse(); + } + }; private static final Logger logger = LogManager.getLogger(TransportShardBulkAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index ffb98dd5224ba..fa261c0e309bb 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -47,21 +48,20 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction { private final ThreadPool threadPool; + private final NodeClient client; private final ClusterService clusterService; - private final TransportFieldCapabilitiesIndexAction shardAction; private final RemoteClusterService remoteClusterService; private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject - public TransportFieldCapabilitiesAction(TransportService transportService, - ClusterService clusterService, ThreadPool threadPool, - TransportFieldCapabilitiesIndexAction shardAction, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportFieldCapabilitiesAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, + NodeClient client, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { super(FieldCapabilitiesAction.NAME, transportService, actionFilters, FieldCapabilitiesRequest::new); this.threadPool = threadPool; + this.client = client; this.clusterService = clusterService; this.remoteClusterService = transportService.getRemoteClusterService(); - this.shardAction = shardAction; this.indexNameExpressionResolver = indexNameExpressionResolver; } @@ -108,7 +108,8 @@ public void onFailure(Exception e) { } }; for (String index : concreteIndices) { - shardAction.execute(new FieldCapabilitiesIndexRequest(request.fields(), index, localIndices), innerListener); + client.executeLocally(TransportFieldCapabilitiesIndexAction.ACTION_INSTANCE, + new FieldCapabilitiesIndexRequest(request.fields(), index, localIndices), innerListener); } // this is the cross cluster part of this API - we force the other cluster to not merge the results but instead diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index 274633b12a613..e7eadf9af0eb9 100644 --- 
a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -19,6 +19,8 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.Action2; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; @@ -47,6 +49,8 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA FieldCapabilitiesIndexResponse> { private static final String ACTION_NAME = FieldCapabilitiesAction.NAME + "[index]"; + public static final Action ACTION_INSTANCE = + new Action2<>(ACTION_NAME, FieldCapabilitiesIndexResponse::new); private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 375bdc18b605a..0d6a7aed93e92 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -40,16 +41,16 @@ public class TransportMultiGetAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiGetAction shardAction; + private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiGetAction(TransportService transportService, ClusterService clusterService, - TransportShardMultiGetAction shardAction, ActionFilters actionFilters, + NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver resolver) { super(MultiGetAction.NAME, transportService, actionFilters, MultiGetRequest::new); this.clusterService = clusterService; - this.shardAction = shardAction; + this.client = client; this.indexNameExpressionResolver = resolver; } @@ -105,7 +106,7 @@ protected void executeShardAction(ActionListener listener, final AtomicInteger counter = new AtomicInteger(shardRequests.size()); for (final MultiGetShardRequest shardRequest : shardRequests.values()) { - shardAction.execute(shardRequest, new ActionListener() { + client.executeLocally(TransportShardMultiGetAction.ACTION_INSTANCE, shardRequest, new ActionListener<>() { @Override public void onResponse(MultiGetShardResponse response) { for (int i = 0; i < response.locations.size(); i++) { diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 9b8ea6bd6cac5..a639795946e21 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -20,6 +20,8 @@ package org.elasticsearch.action.get; import org.apache.logging.log4j.message.ParameterizedMessage; +import 
org.elasticsearch.action.Action; +import org.elasticsearch.action.Action2; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -40,6 +42,7 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction { private static final String ACTION_NAME = MultiGetAction.NAME + "[shard]"; + public static final Action ACTION_INSTANCE = new Action2<>(ACTION_NAME, MultiGetShardResponse::new); private final IndicesService indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 1d05c760b1e3b..b66144e0d6c91 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -41,16 +42,16 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiTermsVectorAction shardAction; + private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiTermVectorsAction(TransportService transportService, ClusterService clusterService, - TransportShardMultiTermsVectorAction shardAction, ActionFilters actionFilters, + NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(MultiTermVectorsAction.NAME, transportService, actionFilters, MultiTermVectorsRequest::new); this.clusterService = clusterService; - this.shardAction = shardAction; + this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; } @@ -106,7 +107,7 @@ protected void executeShardAction(ActionListener liste final AtomicInteger counter = new AtomicInteger(shardRequests.size()); for (final MultiTermVectorsShardRequest shardRequest : shardRequests.values()) { - shardAction.execute(shardRequest, new ActionListener() { + client.executeLocally(TransportShardMultiTermsVectorAction.ACTION_INSTANCE, shardRequest, new ActionListener<>() { @Override public void onResponse(MultiTermVectorsShardResponse response) { for (int i = 0; i < response.locations.size(); i++) { diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index d7ef6d262bcce..aca955748df27 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.termvectors; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.Action; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -43,6 +44,12 @@ public class TransportShardMultiTermsVectorAction extends private final IndicesService indicesService; private static final String ACTION_NAME = MultiTermVectorsAction.NAME + "[shard]"; + public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME) { + @Override + public MultiTermVectorsShardResponse newResponse() { + return new MultiTermVectorsShardResponse(); + } + }; @Inject public TransportShardMultiTermsVectorAction(ClusterService clusterService, TransportService transportService, diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index b85904178a2ce..8d6fb16440ca7 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -27,10 +27,8 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -132,17 +130,13 @@ default UnaryOperator getRestHandlerWrapper(ThreadContext threadCon final class ActionHandler { private final Action action; private final Class> transportAction; - private final Class[] supportTransportActions; /** - * Create a record of an action, the {@linkplain TransportAction} that handles it, and any supporting {@linkplain TransportActions} - * that are needed by that {@linkplain TransportAction}. + * Create a record of an action and the {@linkplain TransportAction} that handles it. */ - public ActionHandler(Action action, Class> transportAction, - Class...
supportTransportActions) { + public ActionHandler(Action action, Class> transportAction) { this.action = action; this.transportAction = transportAction; - this.supportTransportActions = supportTransportActions; } public Action getAction() { @@ -153,17 +147,9 @@ public Class> getTransportAction() return transportAction; } - public Class[] getSupportTransportActions() { - return supportTransportActions; - } - @Override public String toString() { - StringBuilder b = new StringBuilder().append(action.name()).append(" is handled by ").append(transportAction.getName()); - if (supportTransportActions.length > 0) { - b.append('[').append(Strings.arrayToCommaDelimitedString(supportTransportActions)).append(']'); - } - return b.toString(); + return action.name() + " is handled by " + transportAction.getName(); } @Override @@ -173,13 +159,12 @@ public boolean equals(Object obj) { } ActionHandler other = (ActionHandler) obj; return Objects.equals(action, other.action) - && Objects.equals(transportAction, other.transportAction) - && Objects.deepEquals(supportTransportActions, other.supportTransportActions); + && Objects.equals(transportAction, other.transportAction); } @Override public int hashCode() { - return Objects.hash(action, transportAction, supportTransportActions); + return Objects.hash(action, transportAction); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index f213b523fbfaf..ee721377afe67 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -110,7 +110,7 @@ private void indicesThatCannotBeCreatedTestCase(Set expected, final ExecutorService direct = EsExecutors.newDirectExecutorService(); when(threadPool.executor(anyString())).thenReturn(direct); TransportBulkAction action = new TransportBulkAction(threadPool, mock(TransportService.class), clusterService, - null, null, null, mock(ActionFilters.class), null, null) { + null, null, mock(ActionFilters.class), null, null) { @Override void executeBulk(Task task, BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, AtomicArray responses, Map indicesThatCannotBeCreated) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 3f3e20d95d328..95afbbf54f573 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -133,7 +133,7 @@ class TestTransportBulkAction extends TransportBulkAction { TestTransportBulkAction() { super(threadPool, transportService, clusterService, ingestService, - null, null, new ActionFilters(Collections.emptySet()), null, + null, new ActionFilters(Collections.emptySet()), null, new AutoCreateIndex( SETTINGS, new ClusterSettings(SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new IndexNameExpressionResolver() diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 162ef56553df4..65e4b1ee195ec 100644 --- 
a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -59,7 +59,7 @@ class TestTransportBulkAction extends TransportBulkAction { boolean indexCreated = false; // set when the "real" index is created TestTransportBulkAction() { - super(TransportBulkActionTests.this.threadPool, transportService, clusterService, null, null, + super(TransportBulkActionTests.this.threadPool, transportService, clusterService, null, null, new ActionFilters(Collections.emptySet()), new Resolver(), new AutoCreateIndex(Settings.EMPTY, clusterService.getClusterSettings(), new Resolver())); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index fbf32578d8044..192660d712a19 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -115,7 +115,6 @@ void doExecute(Action action, Request request, ActionListener listener, @@ -195,7 +195,7 @@ public void testTransportMultiGetAction_withMissingRouting() { request.add(new MultiGetRequest.Item("index2", "_doc", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); - transportAction = new TransportMultiGetAction(transportService, clusterService, shardAction, + transportAction = new TransportMultiGetAction(transportService, clusterService, client, new ActionFilters(emptySet()), new Resolver()) { @Override protected void executeShardAction(final ActionListener listener, diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java index 35e0de175b2fa..6ea24123b4796 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -172,7 +172,7 @@ public void testTransportMultiGetAction() { request.add(new TermVectorsRequest("index2", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); - transportAction = new TransportMultiTermVectorsAction(transportService, clusterService, shardAction, + transportAction = new TransportMultiTermVectorsAction(transportService, clusterService, client, new ActionFilters(emptySet()), new Resolver()) { @Override protected void executeShardAction(final ActionListener listener, @@ -197,7 +197,7 @@ public void testTransportMultiGetAction_withMissingRouting() { request.add(new TermVectorsRequest("index2", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); - transportAction = new TransportMultiTermVectorsAction(transportService, clusterService, shardAction, + transportAction = new TransportMultiTermVectorsAction(transportService, clusterService, client, new ActionFilters(emptySet()), new Resolver()) { @Override protected void executeShardAction(final ActionListener listener, diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b9d7c2c87d361..07da055185a84 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ 
b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1018,18 +1018,19 @@ allocationService, new AliasValidator(), environment, indexScopedSettings, )); final MappingUpdatedAction mappingUpdatedAction = new MappingUpdatedAction(settings, clusterSettings); mappingUpdatedAction.setClient(client); - final TransportShardBulkAction transportShardBulkAction = new TransportShardBulkAction(settings, transportService, - clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, new UpdateHelper(scriptService), - actionFilters, indexNameExpressionResolver); actions.put(BulkAction.INSTANCE, new TransportBulkAction(threadPool, transportService, clusterService, new IngestService( clusterService, threadPool, environment, scriptService, new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), Collections.emptyList()), - transportShardBulkAction, client, actionFilters, indexNameExpressionResolver, + client, actionFilters, indexNameExpressionResolver, new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver) )); + final TransportShardBulkAction transportShardBulkAction = new TransportShardBulkAction(settings, transportService, + clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, new UpdateHelper(scriptService), + actionFilters, indexNameExpressionResolver); + actions.put(TransportShardBulkAction.ACTION_INSTANCE, transportShardBulkAction); final RestoreService restoreService = new RestoreService( clusterService, repositoriesService, allocationService, metaDataCreateIndexService, From b490eab3a548d34b8621dd28381cc15a36df564f Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Fri, 28 Jun 2019 12:56:22 -0400 Subject: [PATCH 066/140] [DOCS] Rewrite `terms_set` query (#43060) --- .../query-dsl/terms-set-query.asciidoc | 248 +++++++++++++----- 1 file changed, 179 insertions(+), 69 deletions(-) diff --git a/docs/reference/query-dsl/terms-set-query.asciidoc b/docs/reference/query-dsl/terms-set-query.asciidoc index 3ebfb672e205f..0f097e494bfda 100644 --- a/docs/reference/query-dsl/terms-set-query.asciidoc +++ b/docs/reference/query-dsl/terms-set-query.asciidoc @@ -1,121 +1,231 @@ [[query-dsl-terms-set-query]] === Terms Set Query -Returns any documents that match with at least one or more of the -provided terms. The terms are not analyzed and thus must match exactly. -The number of terms that must match varies per document and is either -controlled by a minimum should match field or computed per document in -a minimum should match script. +Returns documents that contain a minimum number of *exact* terms in a provided +field. -The field that controls the number of required terms that must match must -be a number field: +The `terms_set` query is the same as the <>, except you can define the number of matching terms required to +return a document. For example: + +* A field, `programming_languages`, contains a list of known programming +languages, such as `c++`, `java`, or `php` for job candidates. You can use the +`terms_set` query to return documents that match at least two of these +languages. + +* A field, `permissions`, contains a list of possible user permissions for an +application. You can use the `terms_set` query to return documents that +match a subset of these permissions. 
+ +[[terms-set-query-ex-request]] +==== Example request + +[[terms-set-query-ex-request-index-setup]] +===== Index setup +In most cases, you'll need to include a <> field mapping in +your index to use the `terms_set` query. This numeric field contains the +number of matching terms required to return a document. + +To see how you can set up an index for the `terms_set` query, try the +following example. + +. Create an index, `job-candidates`, with the following field mappings: ++ +-- + +* `name`, a <> field. This field contains the name of the +job candidate. + +* `programming_languages`, a <> field. This field contains +programming languages known by the job candidate. + +* `required_matches`, a <> `long` field. This field contains +the number of matching terms required to return a document. [source,js] --------------------------------------------------- -PUT /my-index +---- +PUT /job-candidates { "mappings": { "properties": { + "name": { + "type": "keyword" + }, + "programming_languages": { + "type": "keyword" + }, "required_matches": { "type": "long" } } } } +---- +// CONSOLE +// TESTSETUP + +-- -PUT /my-index/_doc/1?refresh +. Index a document with an ID of `1` and the following values: ++ +-- + +* `Jane Smith` in the `name` field. + +* `["c++", "java"]` in the `programming_languages` field. + +* `2` in the `required_matches` field. + +Include the `?refresh` parameter so the document is immediately available for +search. + +[source,js] +---- +PUT /job-candidates/_doc/1?refresh { - "codes": ["ghi", "jkl"], + "name": "Jane Smith", + "programming_languages": ["c++", "java"], "required_matches": 2 } +---- +// CONSOLE + +-- + +. Index another document with an ID of `2` and the following values: ++ +-- + +* `Jason Response` in the `name` field. + +* `["java", "php"]` in the `programming_languages` field. + +* `2` in the `required_matches` field. -PUT /my-index/_doc/2?refresh +[source,js] +---- +PUT /job-candidates/_doc/2?refresh { - "codes": ["def", "ghi"], + "name": "Jason Response", + "programming_languages": ["java", "php"], "required_matches": 2 } --------------------------------------------------- +---- // CONSOLE -// TESTSETUP -An example that uses the minimum should match field: +-- + +You can now use the `required_matches` field value as the number of +matching terms required to return a document in the `terms_set` query. + +[[terms-set-query-ex-request-query]] +===== Example query + +The following search returns documents where the `programming_languages` field +contains at least two of the following terms: + +* `c++` +* `java` +* `php` + +The `minimum_should_match_field` is `required_matches`. This means the +number of matching terms required is `2`, the value of the `required_matches` +field. 
[source,js] --------------------------------------------------- -GET /my-index/_search +---- +GET /job-candidates/_search { "query": { "terms_set": { - "codes" : { - "terms" : ["abc", "def", "ghi"], + "programming_languages": { + "terms": ["c++", "java", "php"], "minimum_should_match_field": "required_matches" } } } } --------------------------------------------------- +---- // CONSOLE -Response: +[[terms-set-top-level-params]] +==== Top-level parameters for `terms_set` -[source,js] --------------------------------------------------- -{ - "took": 13, - "timed_out": false, - "_shards": { - "total": 1, - "successful": 1, - "skipped" : 0, - "failed": 0 - }, - "hits": { - "total" : { - "value": 1, - "relation": "eq" - }, - "max_score": 0.87546873, - "hits": [ - { - "_index": "my-index", - "_type": "_doc", - "_id": "2", - "_score": 0.87546873, - "_source": { - "codes": ["def", "ghi"], - "required_matches": 2 - } - } - ] - } -} --------------------------------------------------- -// TESTRESPONSE[s/"took": 13,/"took": "$body.took",/] +``:: +Field you wish to search. + +[[terms-set-field-params]] +==== Parameters for `` -Scripts can also be used to control how many terms are required to match -in a more dynamic way. For example a create date or a popularity field -can be used as basis for the number of required terms to match. +`terms`:: ++ +-- +Array of terms you wish to find in the provided ``. To return a document, +a required number of terms must exactly match the field values, including +whitespace and capitalization. -Also the `params.num_terms` parameter is available in the script to indicate the -number of terms that have been specified. +The required number of matching terms is defined in the +`minimum_should_match_field` or `minimum_should_match_script` parameter. +-- -An example that always limits the number of required terms to match to never -become larger than the number of terms specified: +`minimum_should_match_field`:: +<> field containing the number of matching terms +required to return a document. + +`minimum_should_match_script`:: ++ +-- +Custom script containing the number of matching terms required to return a +document. + +For parameters and valid values, see <>. + +For an example query using the `minimum_should_match_script` parameter, see +<>. +-- + +[[terms-set-query-notes]] +==== Notes + +[[terms-set-query-script]] +===== How to use the `minimum_should_match_script` parameter +You can use `minimum_should_match_script` to define the required number of +matching terms using a script. This is useful if you need to set the number of +required terms dynamically. + +[[terms-set-query-script-ex]] +====== Example query using `minimum_should_match_script` + +The following search returns documents where the `programming_languages` field +contains at least two of the following terms: + +* `c++` +* `java` +* `php` + +The `source` parameter of this query indicates: + +* The required number of terms to match cannot exceed `params.num_terms`, the +number of terms provided in the `terms` field. +* The required number of terms to match is `2`, the value of the +`required_matches` field. 
[source,js] --------------------------------------------------- -GET /my-index/_search +---- +GET /job-candidates/_search { "query": { "terms_set": { - "codes" : { - "terms" : ["abc", "def", "ghi"], + "programming_languages": { + "terms": ["c++", "java", "php"], "minimum_should_match_script": { "source": "Math.min(params.num_terms, doc['required_matches'].value)" - } + }, + "boost": 1.0 } } } } --------------------------------------------------- -// CONSOLE +---- +// CONSOLE \ No newline at end of file From c042a10d2fe17a9c7c168431fef88693cbc387bc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 28 Jun 2019 10:42:35 -0700 Subject: [PATCH 067/140] Re-enable bwc tests (#43723) relates #43658 --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index d9437ba2a3a99..5c1fe80668283 100644 --- a/build.gradle +++ b/build.gradle @@ -160,8 +160,8 @@ task verifyVersions { * after the backport of the backcompat code is complete. */ -boolean bwc_tests_enabled = false -final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/43658" /* place a PR link here when committing bwc changes */ +boolean bwc_tests_enabled = true +final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") From 75eb09e8c8cbe3ac22608227171138b6eb6fddc0 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 28 Jun 2019 20:06:22 +0100 Subject: [PATCH 068/140] [ML] Mark ml-cpp dependency as regularly changing (#43760) Since #41817 was merged the ml-cpp zip file for any given version has been cached indefinitely by Gradle. This is problematic, particularly in the case of the master branch where the version 8.0.0-SNAPSHOT will be in use for more than a year. This change tells Gradle that the ml-cpp zip file is a "changing" dependency, and to check whether it has changed every two hours. Two hours is a compromise between checking on every build, which would annoy developers with slow internet connections, and checking rarely, which would cause bug fixes in the ml-cpp code to take a long time to propagate through to elasticsearch PRs that rely on them.
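For anyone unfamiliar with Gradle's changing-module support, here is a minimal sketch of the mechanism this commit relies on (the dependency coordinate below is a placeholder for illustration, not the real ml-cpp artifact):

    configurations {
        nativeBundle {
            // Instead of caching a "changing" module indefinitely, re-check
            // it against the repository at most once every two hours.
            resolutionStrategy.cacheChangingModulesFor 2, 'hours'
        }
    }

    dependencies {
        // changing = true tells Gradle that the artifact behind this fixed
        // version string may be republished, making it eligible for re-checks.
        nativeBundle('org.example:native-lib:1.0.0-SNAPSHOT@zip') {
            changing = true
        }
    }

Both halves matter: marking the dependency as changing makes Gradle willing to re-resolve it at all, while cacheChangingModulesFor bounds how stale the cached copy may become.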
--- x-pack/plugin/ml/build.gradle | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 15f2e3576d7bb..ec35e7fb8849f 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -31,6 +31,7 @@ configurations { substitute module("org.elasticsearch.ml:ml-cpp") with project(":ml-cpp") } } + resolutionStrategy.cacheChangingModulesFor 2, 'hours' } } @@ -62,7 +63,9 @@ dependencies { compile project(':libs:elasticsearch-grok') compile "com.ibm.icu:icu4j:${versions.icu4j}" compile "net.sf.supercsv:super-csv:${versions.supercsv}" - nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" + nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}@zip") { + changing = true + } testCompile 'org.ini4j:ini4j:0.5.2' } From 5ac7ec25139f82a70df5bd91c9f0c500fc241967 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 28 Jun 2019 21:14:59 +0200 Subject: [PATCH 069/140] Migrate watcher hlrc response tests to use AbstractResponseTestCase (#43478) Relates to #43472 --- .../client/AbstractResponseTestCase.java | 6 +- .../watcher/ExecuteWatchResponseTests.java | 115 -------------- .../client/watcher/GetWatchResponseTests.java | 144 +++++++----------- .../hlrc/DeleteWatchResponseTests.java | 28 ++-- .../hlrc/ExecuteWatchResponseTests.java | 28 ++-- .../watcher/hlrc/PutWatchResponseTests.java | 32 ++-- 6 files changed, 92 insertions(+), 261 deletions(-) delete mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/AbstractResponseTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/AbstractResponseTestCase.java index 8565ca14a908a..2a91a639a5ac3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/AbstractResponseTestCase.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/AbstractResponseTestCase.java @@ -45,7 +45,7 @@ public final void testFromXContent() throws IOException { final S serverTestInstance = createServerTestInstance(); final XContentType xContentType = randomFrom(XContentType.values()); - final BytesReference bytes = toShuffledXContent(serverTestInstance, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); + final BytesReference bytes = toShuffledXContent(serverTestInstance, xContentType, getParams(), randomBoolean()); final XContent xContent = XContentFactory.xContent(xContentType); final XContentParser parser = xContent.createParser( @@ -62,4 +62,8 @@ public final void testFromXContent() throws IOException { protected abstract void assertInstances(S serverTestInstance, C clientInstance); + protected ToXContent.Params getParams() { + return ToXContent.EMPTY_PARAMS; + } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java deleted file mode 100644 index 3e0ef4c8a5e5f..0000000000000 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.client.watcher; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.ObjectPath; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.io.InputStream; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -import static org.hamcrest.Matchers.is; - -public class ExecuteWatchResponseTests extends ESTestCase { - - public static final String WATCH_ID_VALUE = "my_watch"; - public static final String NODE_VALUE = "my_node"; - public static final String TRIGGER_TYPE_VALUE = "manual"; - public static final String STATE_VALUE = "executed"; - public static final String STATE_KEY = "state"; - public static final String TRIGGER_EVENT_KEY = "trigger_event"; - public static final String TRIGGER_EVENT_TYPE_KEY = "type"; - public static final String MESSAGES_KEY = "messages"; - public static final String NODE_KEY = "node"; - public static final String WATCH_ID_KEY = "watch_id"; - - public void testFromXContent() throws IOException { - xContentTester(this::createParser, - ExecuteWatchResponseTests::createTestInstance, - this::toXContent, - ExecuteWatchResponse::fromXContent) - .supportsUnknownFields(true) - .assertEqualsConsumer(this::assertEqualInstances) - .assertToXContentEquivalence(false) - .test(); - } - - private void assertEqualInstances(ExecuteWatchResponse expected, ExecuteWatchResponse actual) { - assertThat(expected.getRecordId(), is(actual.getRecordId())); - - // This may have extra json, so lets just assume that if all of the original fields from the creation are there, then its equal - // This is the same code that is in createTestInstance in this class. - Map actualMap = actual.getRecordAsMap(); - assertThat(ObjectPath.eval(WATCH_ID_KEY, actualMap), is(WATCH_ID_VALUE)); - assertThat(ObjectPath.eval(NODE_KEY, actualMap), is(NODE_VALUE)); - List messages = ObjectPath.eval(MESSAGES_KEY, actualMap); - assertThat(messages.size(), is(0)); - assertThat(ObjectPath.eval(TRIGGER_EVENT_KEY + "." 
+ TRIGGER_EVENT_TYPE_KEY, actualMap), is(TRIGGER_TYPE_VALUE)); - assertThat(ObjectPath.eval(STATE_KEY, actualMap), is(STATE_VALUE)); - } - - private XContentBuilder toXContent(BytesReference bytes, XContentBuilder builder) throws IOException { - // EMPTY is safe here because we never use namedObject - try (InputStream stream = bytes.streamInput(); - XContentParser parser = createParser(JsonXContent.jsonXContent, stream)) { - parser.nextToken(); - builder.generator().copyCurrentStructure(parser); - return builder; - } - } - - private XContentBuilder toXContent(ExecuteWatchResponse response, XContentBuilder builder) throws IOException { - builder.startObject(); - builder.field("_id", response.getRecordId()); - builder.field("watch_record"); - toXContent(response.getRecord(), builder); - return builder.endObject(); - } - - private static ExecuteWatchResponse createTestInstance() { - String id = "my_watch_0-2015-06-02T23:17:55.124Z"; - try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(WATCH_ID_KEY, WATCH_ID_VALUE); - builder.field(NODE_KEY, NODE_VALUE); - builder.startArray(MESSAGES_KEY); - builder.endArray(); - builder.startObject(TRIGGER_EVENT_KEY); - builder.field(TRIGGER_EVENT_TYPE_KEY, TRIGGER_TYPE_VALUE); - builder.endObject(); - builder.field(STATE_KEY, STATE_VALUE); - builder.endObject(); - BytesReference bytes = BytesReference.bytes(builder); - return new ExecuteWatchResponse(id, bytes); - } - catch (IOException e) { - throw new AssertionError(e); - } - } -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/GetWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/GetWatchResponseTests.java index 7d919ef5f68c1..b69ea90a49e4a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/GetWatchResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/GetWatchResponseTests.java @@ -19,17 +19,13 @@ package org.elasticsearch.client.watcher; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.client.AbstractResponseTestCase; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.time.DateUtils; -import org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.client.AbstractHlrcStreamableXContentTestCase; import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; @@ -37,70 +33,17 @@ import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import java.io.IOException; -import java.io.InputStream; import java.time.ZonedDateTime; -import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.function.Predicate; -public class GetWatchResponseTests extends - AbstractHlrcStreamableXContentTestCase { +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; - private static final String[] SHUFFLE_FIELDS_EXCEPTION = new 
String[] { "watch" }; +public class GetWatchResponseTests extends AbstractResponseTestCase { @Override - protected String[] getShuffleFieldsExceptions() { - return SHUFFLE_FIELDS_EXCEPTION; - } - - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap("hide_headers", "false")); - } - - @Override - protected Predicate getRandomFieldsExcludeFilter() { - return f -> f.contains("watch") || f.contains("actions") || f.contains("headers"); - } - - @Override - protected void assertEqualInstances(GetWatchResponse expectedInstance, GetWatchResponse newInstance) { - if (expectedInstance.isFound() && - expectedInstance.getSource().getContentType() != newInstance.getSource().getContentType()) { - /** - * The {@link GetWatchResponse#getContentType()} depends on the content type that - * was used to serialize the main object so we use the same content type than the - * expectedInstance to translate the watch of the newInstance. - */ - XContent from = XContentFactory.xContent(newInstance.getSource().getContentType()); - XContent to = XContentFactory.xContent(expectedInstance.getSource().getContentType()); - final BytesReference newSource; - // It is safe to use EMPTY here because this never uses namedObject - try (InputStream stream = newInstance.getSource().getBytes().streamInput(); - XContentParser parser = XContentFactory.xContent(from.type()).createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { - parser.nextToken(); - XContentBuilder builder = XContentFactory.contentBuilder(to.type()); - builder.copyCurrentStructure(parser); - newSource = BytesReference.bytes(builder); - } catch (IOException e) { - throw new AssertionError(e); - } - newInstance = new GetWatchResponse(newInstance.getId(), newInstance.getVersion(), - newInstance.getSeqNo(), newInstance.getPrimaryTerm(), - newInstance.getStatus(), new XContentSource(newSource, expectedInstance.getSource().getContentType())); - } - super.assertEqualInstances(expectedInstance, newInstance); - } - - @Override - protected GetWatchResponse createBlankInstance() { - return new GetWatchResponse(); - } - - @Override - protected GetWatchResponse createTestInstance() { + protected GetWatchResponse createServerTestInstance() { String id = randomAlphaOfLength(10); if (LuceneTestCase.rarely()) { return new GetWatchResponse(id); @@ -113,6 +56,34 @@ protected GetWatchResponse createTestInstance() { return new GetWatchResponse(id, version, seqNo, primaryTerm, status, new XContentSource(source, XContentType.JSON)); } + @Override + protected org.elasticsearch.client.watcher.GetWatchResponse doParseToClientInstance(XContentParser parser) throws IOException { + return org.elasticsearch.client.watcher.GetWatchResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(GetWatchResponse serverTestInstance, org.elasticsearch.client.watcher.GetWatchResponse clientInstance) { + assertThat(clientInstance.getId(), equalTo(serverTestInstance.getId())); + assertThat(clientInstance.getSeqNo(), equalTo(serverTestInstance.getSeqNo())); + assertThat(clientInstance.getPrimaryTerm(), equalTo(serverTestInstance.getPrimaryTerm())); + assertThat(clientInstance.getVersion(), equalTo(serverTestInstance.getVersion())); + if (serverTestInstance.getStatus() != null) { + assertThat(convertWatchStatus(clientInstance.getStatus()), equalTo(serverTestInstance.getStatus())); + } else { + assertThat(clientInstance.getStatus(), nullValue()); + } + if 
(serverTestInstance.getSource() != null) { + assertThat(clientInstance.getSourceAsMap(), equalTo(serverTestInstance.getSource().getAsMap())); + } else { + assertThat(clientInstance.getSource(), nullValue()); + } + } + + @Override + protected ToXContent.Params getParams() { + return new ToXContent.MapParams(Map.of("hide_headers", "false")); + } + private static BytesReference simpleWatch() { try { XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); @@ -179,58 +150,45 @@ private static ActionStatus.Execution randomExecution() { } } - @Override - public org.elasticsearch.client.watcher.GetWatchResponse doHlrcParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.watcher.GetWatchResponse.fromXContent(parser); - } - - @Override - public GetWatchResponse convertHlrcToInternal(org.elasticsearch.client.watcher.GetWatchResponse instance) { - if (instance.isFound()) { - return new GetWatchResponse(instance.getId(), instance.getVersion(), instance.getSeqNo(), instance.getPrimaryTerm(), - convertHlrcToInternal(instance.getStatus()), new XContentSource(instance.getSource(), instance.getContentType())); - } else { - return new GetWatchResponse(instance.getId()); - } - } - - private static WatchStatus convertHlrcToInternal(org.elasticsearch.client.watcher.WatchStatus status) { + private static WatchStatus convertWatchStatus(org.elasticsearch.client.watcher.WatchStatus status) { final Map actions = new HashMap<>(); for (Map.Entry entry : status.getActions().entrySet()) { - actions.put(entry.getKey(), convertHlrcToInternal(entry.getValue())); + actions.put(entry.getKey(), convertActionStatus(entry.getValue())); } return new WatchStatus(status.version(), - convertHlrcToInternal(status.state()), - status.getExecutionState() == null ? null : convertHlrcToInternal(status.getExecutionState()), + convertWatchStatusState(status.state()), + status.getExecutionState() == null ? null : convertWatchStatus(status.getExecutionState()), status.lastChecked(), status.lastMetCondition(), actions, status.getHeaders() ); } - private static ActionStatus convertHlrcToInternal(org.elasticsearch.client.watcher.ActionStatus actionStatus) { - return new ActionStatus(convertHlrcToInternal(actionStatus.ackStatus()), - actionStatus.lastExecution() == null ? null : convertHlrcToInternal(actionStatus.lastExecution()), - actionStatus.lastSuccessfulExecution() == null ? null : convertHlrcToInternal(actionStatus.lastSuccessfulExecution()), - actionStatus.lastThrottle() == null ? null : convertHlrcToInternal(actionStatus.lastThrottle()) + private static ActionStatus convertActionStatus(org.elasticsearch.client.watcher.ActionStatus actionStatus) { + return new ActionStatus(convertAckStatus(actionStatus.ackStatus()), + actionStatus.lastExecution() == null ? null : convertActionStatusExecution(actionStatus.lastExecution()), + actionStatus.lastSuccessfulExecution() == null ? null : convertActionStatusExecution(actionStatus.lastSuccessfulExecution()), + actionStatus.lastThrottle() == null ? 
null : convertActionStatusThrottle(actionStatus.lastThrottle()) ); } - private static ActionStatus.AckStatus convertHlrcToInternal(org.elasticsearch.client.watcher.ActionStatus.AckStatus ackStatus) { - return new ActionStatus.AckStatus(ackStatus.timestamp(), convertHlrcToInternal(ackStatus.state())); + private static ActionStatus.AckStatus convertAckStatus(org.elasticsearch.client.watcher.ActionStatus.AckStatus ackStatus) { + return new ActionStatus.AckStatus(ackStatus.timestamp(), convertAckStatusState(ackStatus.state())); } - private static ActionStatus.AckStatus.State convertHlrcToInternal(org.elasticsearch.client.watcher.ActionStatus.AckStatus.State state) { + private static ActionStatus.AckStatus.State convertAckStatusState( + org.elasticsearch.client.watcher.ActionStatus.AckStatus.State state) { return ActionStatus.AckStatus.State.valueOf(state.name()); } - private static WatchStatus.State convertHlrcToInternal(org.elasticsearch.client.watcher.WatchStatus.State state) { + private static WatchStatus.State convertWatchStatusState(org.elasticsearch.client.watcher.WatchStatus.State state) { return new WatchStatus.State(state.isActive(), state.getTimestamp()); } - private static ExecutionState convertHlrcToInternal(org.elasticsearch.client.watcher.ExecutionState executionState) { + private static ExecutionState convertWatchStatus(org.elasticsearch.client.watcher.ExecutionState executionState) { return ExecutionState.valueOf(executionState.name()); } - private static ActionStatus.Execution convertHlrcToInternal(org.elasticsearch.client.watcher.ActionStatus.Execution execution) { + private static ActionStatus.Execution convertActionStatusExecution( + org.elasticsearch.client.watcher.ActionStatus.Execution execution) { if (execution.successful()) { return ActionStatus.Execution.successful(execution.timestamp()); } else { @@ -238,7 +196,7 @@ private static ActionStatus.Execution convertHlrcToInternal(org.elasticsearch.cl } } - private static ActionStatus.Throttle convertHlrcToInternal(org.elasticsearch.client.watcher.ActionStatus.Throttle throttle) { + private static ActionStatus.Throttle convertActionStatusThrottle(org.elasticsearch.client.watcher.ActionStatus.Throttle throttle) { return new ActionStatus.Throttle(throttle.timestamp(), throttle.reason()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/DeleteWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/DeleteWatchResponseTests.java index eebf2c9cef184..493375c451745 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/DeleteWatchResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/DeleteWatchResponseTests.java @@ -18,17 +18,19 @@ */ package org.elasticsearch.client.watcher.hlrc; +import org.elasticsearch.client.AbstractResponseTestCase; import org.elasticsearch.client.watcher.DeleteWatchResponse; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.client.AbstractHlrcXContentTestCase; import java.io.IOException; -public class DeleteWatchResponseTests extends AbstractHlrcXContentTestCase< +import static org.hamcrest.Matchers.equalTo; + +public class DeleteWatchResponseTests extends AbstractResponseTestCase< org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse, DeleteWatchResponse> { @Override - protected org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse createTestInstance() { + protected 
org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse createServerTestInstance() { String id = randomAlphaOfLength(10); long version = randomLongBetween(1, 10); boolean found = randomBoolean(); @@ -36,23 +38,15 @@ protected org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse createTes } @Override - protected org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse.fromXContent(parser); - } - - @Override - public DeleteWatchResponse doHlrcParseInstance(XContentParser parser) throws IOException { + protected DeleteWatchResponse doParseToClientInstance(XContentParser parser) throws IOException { return DeleteWatchResponse.fromXContent(parser); } @Override - public org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse convertHlrcToInternal(DeleteWatchResponse instance) { - return new org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse(instance.getId(), instance.getVersion(), - instance.isFound()); - } - - @Override - protected boolean supportsUnknownFields() { - return false; + protected void assertInstances(org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse serverTestInstance, + DeleteWatchResponse clientInstance) { + assertThat(clientInstance.getId(), equalTo(serverTestInstance.getId())); + assertThat(clientInstance.getVersion(), equalTo(serverTestInstance.getVersion())); + assertThat(clientInstance.isFound(), equalTo(serverTestInstance.isFound())); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/ExecuteWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/ExecuteWatchResponseTests.java index ace75517a9333..c1492eb53020f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/ExecuteWatchResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/ExecuteWatchResponseTests.java @@ -19,31 +19,23 @@ package org.elasticsearch.client.watcher.hlrc; +import org.elasticsearch.client.AbstractResponseTestCase; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.client.AbstractHlrcXContentTestCase; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse; import java.io.IOException; -public class ExecuteWatchResponseTests - extends AbstractHlrcXContentTestCase { +import static org.hamcrest.Matchers.equalTo; - @Override - public org.elasticsearch.client.watcher.ExecuteWatchResponse doHlrcParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.watcher.ExecuteWatchResponse.fromXContent(parser); - } +public class ExecuteWatchResponseTests extends AbstractResponseTestCase< + ExecuteWatchResponse, org.elasticsearch.client.watcher.ExecuteWatchResponse> { @Override - public ExecuteWatchResponse convertHlrcToInternal(org.elasticsearch.client.watcher.ExecuteWatchResponse instance) { - return new ExecuteWatchResponse(instance.getRecordId(), instance.getRecord(), XContentType.JSON); - } - - @Override - protected ExecuteWatchResponse createTestInstance() { + protected ExecuteWatchResponse createServerTestInstance() { String id = 
"my_watch_0-2015-06-02T23:17:55.124Z"; try { XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -66,12 +58,14 @@ protected ExecuteWatchResponse createTestInstance() { } @Override - protected ExecuteWatchResponse doParseInstance(XContentParser parser) throws IOException { - return ExecuteWatchResponse.fromXContent(parser); + protected org.elasticsearch.client.watcher.ExecuteWatchResponse doParseToClientInstance(XContentParser parser) throws IOException { + return org.elasticsearch.client.watcher.ExecuteWatchResponse.fromXContent(parser); } @Override - protected boolean supportsUnknownFields() { - return false; + protected void assertInstances(ExecuteWatchResponse serverTestInstance, + org.elasticsearch.client.watcher.ExecuteWatchResponse clientInstance) { + assertThat(clientInstance.getRecordId(), equalTo(serverTestInstance.getRecordId())); + assertThat(clientInstance.getRecordAsMap(), equalTo(serverTestInstance.getRecordSource().getAsMap())); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/PutWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/PutWatchResponseTests.java index 9b65618cafc46..a47de0d15fda6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/PutWatchResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/hlrc/PutWatchResponseTests.java @@ -18,17 +18,19 @@ */ package org.elasticsearch.client.watcher.hlrc; +import org.elasticsearch.client.AbstractResponseTestCase; import org.elasticsearch.client.watcher.PutWatchResponse; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.client.AbstractHlrcXContentTestCase; import java.io.IOException; -public class PutWatchResponseTests extends AbstractHlrcXContentTestCase< +import static org.hamcrest.Matchers.equalTo; + +public class PutWatchResponseTests extends AbstractResponseTestCase< org.elasticsearch.protocol.xpack.watcher.PutWatchResponse, PutWatchResponse> { @Override - protected org.elasticsearch.protocol.xpack.watcher.PutWatchResponse createTestInstance() { + protected org.elasticsearch.protocol.xpack.watcher.PutWatchResponse createServerTestInstance() { String id = randomAlphaOfLength(10); long seqNo = randomNonNegativeLong(); long primaryTerm = randomLongBetween(1, 20); @@ -38,23 +40,17 @@ protected org.elasticsearch.protocol.xpack.watcher.PutWatchResponse createTestIn } @Override - protected org.elasticsearch.protocol.xpack.watcher.PutWatchResponse doParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.protocol.xpack.watcher.PutWatchResponse.fromXContent(parser); - } - - @Override - public PutWatchResponse doHlrcParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.watcher.PutWatchResponse.fromXContent(parser); - } - - @Override - public org.elasticsearch.protocol.xpack.watcher.PutWatchResponse convertHlrcToInternal(PutWatchResponse instance) { - return new org.elasticsearch.protocol.xpack.watcher.PutWatchResponse(instance.getId(), instance.getVersion(), - instance.getSeqNo(), instance.getPrimaryTerm(), instance.isCreated()); + protected PutWatchResponse doParseToClientInstance(XContentParser parser) throws IOException { + return PutWatchResponse.fromXContent(parser); } @Override - protected boolean supportsUnknownFields() { - return false; + protected void assertInstances(org.elasticsearch.protocol.xpack.watcher.PutWatchResponse serverTestInstance, + 
PutWatchResponse clientInstance) { assertThat(clientInstance.getId(), equalTo(serverTestInstance.getId())); assertThat(clientInstance.getSeqNo(), equalTo(serverTestInstance.getSeqNo())); assertThat(clientInstance.getPrimaryTerm(), equalTo(serverTestInstance.getPrimaryTerm())); assertThat(clientInstance.getVersion(), equalTo(serverTestInstance.getVersion())); assertThat(clientInstance.isCreated(), equalTo(serverTestInstance.isCreated())); } } From e6444d3007f21ce4ea032daf07ab2c6f18611ee4 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 28 Jun 2019 14:01:47 -0700 Subject: [PATCH 070/140] Add StreamableResponseAction to aid in deprecation of Streamable (#43770) The Action base class currently works for both Streamable and Writeable response types. This commit introduces StreamableResponseAction, which only the legacy Action implementations that provide newResponse() will extend. This eliminates the need for overriding newResponse() with an UnsupportedOperationException. relates #34389 --- .../noop/action/bulk/NoopBulkAction.java | 4 +-- .../noop/action/search/NoopSearchAction.java | 5 --- .../ingest/common/GrokProcessorGetAction.java | 4 +-- .../mustache/MultiSearchTemplateAction.java | 5 --- .../script/mustache/SearchTemplateAction.java | 5 --- .../action/PainlessContextAction.java | 5 --- .../action/PainlessExecuteAction.java | 4 +-- .../index/rankeval/RankEvalAction.java | 4 +-- .../index/reindex/RethrottleAction.java | 5 --- .../java/org/elasticsearch/action/Action.java | 28 ++++++++-------- ...on2.java => StreamableResponseAction.java} | 32 +++++++++++-------- .../ClusterAllocationExplainAction.java | 4 +-- .../AddVotingConfigExclusionsAction.java | 5 --- .../ClearVotingConfigExclusionsAction.java | 5 --- .../cluster/health/ClusterHealthAction.java | 4 +-- .../hotthreads/NodesHotThreadsAction.java | 4 +-- .../cluster/node/info/NodesInfoAction.java | 4 +-- .../NodesReloadSecureSettingsAction.java | 4 +-- .../cluster/node/stats/NodesStatsAction.java | 4 +-- .../node/tasks/cancel/CancelTasksAction.java | 5 --- .../cluster/node/tasks/get/GetTaskAction.java | 4 +-- .../node/tasks/list/ListTasksAction.java | 5 --- .../cluster/node/usage/NodesUsageAction.java | 4 +-- .../cluster/remote/RemoteInfoAction.java | 4 +-- .../delete/DeleteRepositoryAction.java | 5 --- .../get/GetRepositoriesAction.java | 5 --- .../repositories/put/PutRepositoryAction.java | 5 --- .../verify/VerifyRepositoryAction.java | 4 +-- .../cluster/reroute/ClusterRerouteAction.java | 5 --- .../settings/ClusterUpdateSettingsAction.java | 5 --- .../shards/ClusterSearchShardsAction.java | 5 --- .../create/CreateSnapshotAction.java | 4 +-- .../delete/DeleteSnapshotAction.java | 5 --- .../snapshots/get/GetSnapshotsAction.java | 5 --- .../restore/RestoreSnapshotAction.java | 4 +-- .../status/SnapshotsStatusAction.java | 4 +-- .../cluster/state/ClusterStateAction.java | 4 +-- .../cluster/stats/ClusterStatsAction.java | 4 +-- .../DeleteStoredScriptAction.java | 5 --- .../storedscripts/GetStoredScriptAction.java | 4 +-- .../storedscripts/PutStoredScriptAction.java | 5 --- .../tasks/PendingClusterTasksAction.java | 4 +-- .../indices/alias/IndicesAliasesAction.java | 5 --- .../indices/alias/get/GetAliasesAction.java | 4 +-- .../admin/indices/analyze/AnalyzeAction.java | 5 --- .../cache/clear/ClearIndicesCacheAction.java | 4 +-- .../admin/indices/close/CloseIndexAction.java | 5 --- .../indices/create/CreateIndexAction.java | 5 --- .../indices/delete/DeleteIndexAction.java | 5 --- .../admin/indices/flush/FlushAction.java | 4
+-- .../indices/flush/SyncedFlushAction.java | 4 +-- .../indices/forcemerge/ForceMergeAction.java | 4 +-- .../admin/indices/get/GetIndexAction.java | 4 +-- .../mapping/get/GetFieldMappingsAction.java | 5 --- .../mapping/get/GetMappingsAction.java | 4 +-- .../TransportGetFieldMappingsIndexAction.java | 3 +- .../indices/mapping/put/PutMappingAction.java | 5 --- .../admin/indices/open/OpenIndexAction.java | 5 --- .../indices/recovery/RecoveryAction.java | 4 +-- .../admin/indices/refresh/RefreshAction.java | 4 +-- .../indices/rollover/RolloverAction.java | 5 --- .../segments/IndicesSegmentsAction.java | 4 +-- .../settings/get/GetSettingsAction.java | 4 +-- .../settings/put/UpdateSettingsAction.java | 5 --- .../shards/IndicesShardStoresAction.java | 4 +-- .../admin/indices/shrink/ResizeAction.java | 5 --- .../admin/indices/shrink/ShrinkAction.java | 5 --- .../indices/stats/IndicesStatsAction.java | 4 +-- .../delete/DeleteIndexTemplateAction.java | 5 --- .../template/get/GetIndexTemplatesAction.java | 4 +-- .../template/put/PutIndexTemplateAction.java | 5 --- .../upgrade/get/UpgradeStatusAction.java | 4 +-- .../indices/upgrade/post/UpgradeAction.java | 4 +-- .../upgrade/post/UpgradeSettingsAction.java | 5 --- .../validate/query/ValidateQueryAction.java | 4 +-- .../elasticsearch/action/bulk/BulkAction.java | 4 +-- .../action/bulk/TransportShardBulkAction.java | 3 +- .../action/delete/DeleteAction.java | 4 +-- .../action/explain/ExplainAction.java | 5 --- .../fieldcaps/FieldCapabilitiesAction.java | 4 +-- ...TransportFieldCapabilitiesIndexAction.java | 3 +- .../elasticsearch/action/get/GetAction.java | 5 --- .../action/get/MultiGetAction.java | 4 +-- .../get/TransportShardMultiGetAction.java | 3 +- .../action/index/IndexAction.java | 4 +-- .../action/ingest/DeletePipelineAction.java | 5 --- .../action/ingest/GetPipelineAction.java | 4 +-- .../action/ingest/PutPipelineAction.java | 5 --- .../action/ingest/SimulatePipelineAction.java | 4 +-- .../elasticsearch/action/main/MainAction.java | 4 +-- .../action/search/ClearScrollAction.java | 4 +-- .../action/search/MultiSearchAction.java | 5 --- .../action/search/SearchAction.java | 5 --- .../action/search/SearchScrollAction.java | 5 --- .../termvectors/MultiTermVectorsAction.java | 4 +-- .../action/termvectors/TermVectorsAction.java | 5 --- .../TransportShardMultiTermsVectorAction.java | 3 +- .../action/update/UpdateAction.java | 4 +-- .../index/reindex/DeleteByQueryAction.java | 4 +-- .../index/reindex/ReindexAction.java | 4 +-- .../index/reindex/UpdateByQueryAction.java | 4 +-- .../index/seqno/RetentionLeaseActions.java | 8 ++--- .../CompletionPersistentTaskAction.java | 4 +-- .../RemovePersistentTaskAction.java | 4 +-- .../persistent/StartPersistentTaskAction.java | 4 +-- .../UpdatePersistentTaskStatusAction.java | 4 +-- .../action/ActionModuleTests.java | 2 +- .../org/elasticsearch/action/ActionTests.java | 2 +- .../cluster/node/tasks/TestTaskPlugin.java | 8 ++--- .../InternalOrPrivateSettingsPlugin.java | 4 +-- .../persistent/TestPersistentTasksPlugin.java | 5 --- .../xpack/ccr/action/ShardChangesAction.java | 5 --- .../bulk/BulkShardOperationsAction.java | 4 +-- .../ClearCcrRestoreSessionAction.java | 5 --- .../DeleteInternalCcrRepositoryAction.java | 5 --- .../GetCcrRestoreFileChunkAction.java | 5 --- .../PutCcrRestoreSessionAction.java | 5 --- .../PutInternalCcrRepositoryAction.java | 5 --- .../license/DeleteLicenseAction.java | 5 --- .../license/GetBasicStatusAction.java | 4 +-- .../license/GetLicenseAction.java | 4 +-- 
.../license/GetTrialStatusAction.java | 4 +-- .../license/PostStartBasicAction.java | 5 --- .../license/PostStartTrialAction.java | 4 +-- .../license/PutLicenseAction.java | 5 --- .../core/action/ReloadAnalyzerAction.java | 4 +-- .../action/TransportFreezeIndexAction.java | 5 --- .../xpack/core/action/XPackInfoAction.java | 4 +-- .../core/action/XPackInfoFeatureAction.java | 4 +-- .../xpack/core/action/XPackUsageAction.java | 4 +-- .../core/action/XPackUsageFeatureAction.java | 4 +-- .../xpack/core/ccr/action/CcrStatsAction.java | 5 --- .../action/DeleteAutoFollowPatternAction.java | 5 --- .../core/ccr/action/FollowInfoAction.java | 5 --- .../core/ccr/action/FollowStatsAction.java | 5 --- .../core/ccr/action/ForgetFollowerAction.java | 4 +-- .../action/GetAutoFollowPatternAction.java | 5 --- .../core/ccr/action/PauseFollowAction.java | 5 --- .../action/PutAutoFollowPatternAction.java | 5 --- .../core/ccr/action/PutFollowAction.java | 5 --- .../core/ccr/action/ResumeFollowAction.java | 5 --- .../xpack/core/ccr/action/UnfollowAction.java | 5 --- .../DeleteDataFrameTransformAction.java | 5 --- .../action/GetDataFrameTransformsAction.java | 5 --- .../GetDataFrameTransformsStatsAction.java | 5 --- .../PreviewDataFrameTransformAction.java | 5 --- .../action/PutDataFrameTransformAction.java | 5 --- .../action/StartDataFrameTransformAction.java | 5 --- .../StartDataFrameTransformTaskAction.java | 5 --- .../action/StopDataFrameTransformAction.java | 5 --- .../deprecation/DeprecationInfoAction.java | 4 +-- .../NodesDeprecationCheckAction.java | 3 +- .../core/graph/action/GraphExploreAction.java | 4 +-- .../action/DeleteLifecycleAction.java | 5 --- .../action/ExplainLifecycleAction.java | 4 +-- .../action/GetLifecycleAction.java | 4 +-- .../action/GetStatusAction.java | 4 +-- .../action/MoveToStepAction.java | 5 --- .../action/PutLifecycleAction.java | 5 --- .../RemoveIndexLifecyclePolicyAction.java | 4 +-- .../indexlifecycle/action/RetryAction.java | 5 --- .../indexlifecycle/action/StartILMAction.java | 5 --- .../indexlifecycle/action/StopILMAction.java | 5 --- .../xpack/core/ml/action/CloseJobAction.java | 5 --- .../core/ml/action/DeleteCalendarAction.java | 5 --- .../ml/action/DeleteCalendarEventAction.java | 5 --- .../DeleteDataFrameAnalyticsAction.java | 5 --- .../core/ml/action/DeleteDatafeedAction.java | 5 --- .../ml/action/DeleteExpiredDataAction.java | 4 +-- .../core/ml/action/DeleteFilterAction.java | 5 --- .../core/ml/action/DeleteForecastAction.java | 5 --- .../xpack/core/ml/action/DeleteJobAction.java | 5 --- .../ml/action/DeleteModelSnapshotAction.java | 5 --- .../ml/action/EvaluateDataFrameAction.java | 4 +-- .../ml/action/FinalizeJobExecutionAction.java | 5 --- .../ml/action/FindFileStructureAction.java | 4 +-- .../xpack/core/ml/action/FlushJobAction.java | 5 --- .../core/ml/action/ForecastJobAction.java | 5 --- .../core/ml/action/GetBucketsAction.java | 4 +-- .../ml/action/GetCalendarEventsAction.java | 4 +-- .../core/ml/action/GetCalendarsAction.java | 4 +-- .../core/ml/action/GetCategoriesAction.java | 4 +-- .../action/GetDataFrameAnalyticsAction.java | 4 +-- .../GetDataFrameAnalyticsStatsAction.java | 5 --- .../core/ml/action/GetDatafeedsAction.java | 4 +-- .../ml/action/GetDatafeedsStatsAction.java | 4 +-- .../core/ml/action/GetFiltersAction.java | 5 ++- .../core/ml/action/GetInfluencersAction.java | 5 ++- .../xpack/core/ml/action/GetJobsAction.java | 4 +-- .../core/ml/action/GetJobsStatsAction.java | 5 --- .../ml/action/GetModelSnapshotsAction.java | 4 +-- 
.../ml/action/GetOverallBucketsAction.java | 4 +-- .../core/ml/action/GetRecordsAction.java | 4 +-- .../core/ml/action/IsolateDatafeedAction.java | 5 --- .../core/ml/action/KillProcessAction.java | 5 --- .../xpack/core/ml/action/MlInfoAction.java | 4 +-- .../xpack/core/ml/action/OpenJobAction.java | 5 --- .../core/ml/action/PersistJobAction.java | 5 --- .../ml/action/PostCalendarEventsAction.java | 4 +-- .../xpack/core/ml/action/PostDataAction.java | 5 --- .../core/ml/action/PreviewDatafeedAction.java | 4 +-- .../core/ml/action/PutCalendarAction.java | 4 +-- .../action/PutDataFrameAnalyticsAction.java | 4 +-- .../core/ml/action/PutDatafeedAction.java | 4 +-- .../xpack/core/ml/action/PutFilterAction.java | 4 +-- .../xpack/core/ml/action/PutJobAction.java | 4 +-- .../ml/action/RevertModelSnapshotAction.java | 4 +-- .../core/ml/action/SetUpgradeModeAction.java | 5 --- .../action/StartDataFrameAnalyticsAction.java | 5 --- .../core/ml/action/StartDatafeedAction.java | 5 --- .../action/StopDataFrameAnalyticsAction.java | 5 --- .../core/ml/action/StopDatafeedAction.java | 5 --- .../ml/action/UpdateCalendarJobAction.java | 4 +-- .../core/ml/action/UpdateDatafeedAction.java | 4 +-- .../core/ml/action/UpdateFilterAction.java | 4 +-- .../xpack/core/ml/action/UpdateJobAction.java | 4 +-- .../ml/action/UpdateModelSnapshotAction.java | 4 +-- .../core/ml/action/UpdateProcessAction.java | 5 --- .../ml/action/ValidateDetectorAction.java | 5 --- .../ml/action/ValidateJobConfigAction.java | 5 --- .../action/MonitoringBulkAction.java | 4 +-- .../rollup/action/DeleteRollupJobAction.java | 5 --- .../rollup/action/GetRollupCapsAction.java | 5 ++- .../action/GetRollupIndexCapsAction.java | 5 ++- .../rollup/action/GetRollupJobsAction.java | 5 --- .../rollup/action/PutRollupJobAction.java | 5 --- .../rollup/action/RollupSearchAction.java | 5 --- .../rollup/action/StartRollupJobAction.java | 5 --- .../rollup/action/StopRollupJobAction.java | 5 --- .../security/action/CreateApiKeyAction.java | 5 --- .../core/security/action/GetApiKeyAction.java | 5 --- .../action/InvalidateApiKeyAction.java | 5 --- .../oidc/OpenIdConnectAuthenticateAction.java | 5 --- .../oidc/OpenIdConnectLogoutAction.java | 5 --- ...nIdConnectPrepareAuthenticationAction.java | 5 --- .../privilege/DeletePrivilegesAction.java | 4 +-- .../action/privilege/GetPrivilegesAction.java | 4 +-- .../action/privilege/PutPrivilegesAction.java | 4 +-- .../action/realm/ClearRealmCacheAction.java | 4 +-- .../action/role/ClearRolesCacheAction.java | 4 +-- .../action/role/DeleteRoleAction.java | 4 +-- .../security/action/role/GetRolesAction.java | 4 +-- .../security/action/role/PutRoleAction.java | 4 +-- .../rolemapping/DeleteRoleMappingAction.java | 4 +-- .../rolemapping/GetRoleMappingsAction.java | 4 +-- .../rolemapping/PutRoleMappingAction.java | 4 +-- .../action/saml/SamlAuthenticateAction.java | 4 +-- .../saml/SamlInvalidateSessionAction.java | 4 +-- .../action/saml/SamlLogoutAction.java | 4 +-- .../saml/SamlPrepareAuthenticationAction.java | 4 +-- .../action/token/CreateTokenAction.java | 4 +-- .../action/token/InvalidateTokenAction.java | 4 +-- .../action/token/RefreshTokenAction.java | 6 ++-- .../action/user/AuthenticateAction.java | 4 +-- .../action/user/ChangePasswordAction.java | 4 +-- .../action/user/DeleteUserAction.java | 4 +-- .../action/user/GetUserPrivilegesAction.java | 4 +-- .../security/action/user/GetUsersAction.java | 4 +-- .../action/user/HasPrivilegesAction.java | 4 +-- .../security/action/user/PutUserAction.java | 4 +-- 
.../action/user/SetEnabledAction.java | 4 +-- .../ssl/action/GetCertificateInfoAction.java | 4 +-- .../upgrade/actions/IndexUpgradeAction.java | 4 +-- .../actions/IndexUpgradeInfoAction.java | 4 +-- .../transport/actions/ack/AckWatchAction.java | 4 +-- .../actions/activate/ActivateWatchAction.java | 4 +-- .../actions/delete/DeleteWatchAction.java | 4 +-- .../actions/execute/ExecuteWatchAction.java | 4 +-- .../transport/actions/get/GetWatchAction.java | 4 ++- .../transport/actions/put/PutWatchAction.java | 4 +-- .../actions/service/WatcherServiceAction.java | 5 --- .../actions/stats/WatcherStatsAction.java | 4 +-- .../ml/job/persistence/MockClientBuilder.java | 3 +- .../sql/action/SqlClearCursorAction.java | 4 +-- .../xpack/sql/action/SqlQueryAction.java | 4 +-- .../xpack/sql/action/SqlTranslateAction.java | 4 +-- .../xpack/sql/plugin/SqlStatsAction.java | 4 +-- 277 files changed, 346 insertions(+), 934 deletions(-) rename server/src/main/java/org/elasticsearch/action/{Action2.java => StreamableResponseAction.java} (53%) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java index 2bfd3b0cc8ed4..40c65aee70074 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.plugin.noop.action.bulk; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.bulk.BulkResponse; -public class NoopBulkAction extends Action { +public class NoopBulkAction extends StreamableResponseAction { public static final String NAME = "mock:data/write/bulk"; public static final NoopBulkAction INSTANCE = new NoopBulkAction(); diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java index 9b390e1ffddbc..aa316ae435ac3 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java @@ -30,11 +30,6 @@ private NoopSearchAction() { super(NAME); } - @Override - public SearchResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return SearchResponse::new; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 1141a4cf7e891..ee2f49390b8db 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -18,11 +18,11 @@ */ package org.elasticsearch.ingest.common; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.node.NodeClient; @@ -45,7 +45,7 @@ import static org.elasticsearch.ingest.common.IngestCommonPlugin.GROK_PATTERNS; import static org.elasticsearch.rest.RestRequest.Method.GET; -public class GrokProcessorGetAction extends Action { +public class GrokProcessorGetAction extends StreamableResponseAction { static final GrokProcessorGetAction INSTANCE = new GrokProcessorGetAction(); static final String NAME = "cluster:admin/ingest/processor/grok/get"; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java index a9a44d0471586..573c5888991d1 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java @@ -31,11 +31,6 @@ private MultiSearchTemplateAction() { super(NAME); } - @Override - public MultiSearchTemplateResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return MultiSearchTemplateResponse::new; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java index 5d905ec39e1ab..7bd57154e26fc 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java @@ -31,11 +31,6 @@ private SearchTemplateAction() { super(NAME); } - @Override - public SearchTemplateResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return SearchTemplateResponse::new; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index c947de1fd82d3..4abad4d78af3e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -75,11 +75,6 @@ private PainlessContextAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException(); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 32b61f9c38ba4..349a3281b4ca9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -99,8 +99,8 @@ private 
PainlessExecuteAction() { } @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + public Writeable.Reader getResponseReader() { + return Response::new; } public static class Request extends SingleShardRequest implements ToXContentObject {
diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java index 54e89fe0e98b8..664377786f87f 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for explaining evaluating search ranking results. */ -public class RankEvalAction extends Action { +public class RankEvalAction extends StreamableResponseAction { public static final RankEvalAction INSTANCE = new RankEvalAction(); public static final String NAME = "indices:data/read/rank_eval";
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java index 3cb2c60c62373..513b4261bdf52 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java @@ -31,11 +31,6 @@ private RethrottleAction() { super(NAME); } - @Override - public ListTasksResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return ListTasksResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index f0df6202072a4..0037533797d27 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -26,15 +26,27 @@ /** * A generic action. Should strive to make it a singleton. */ -public abstract class Action { +public class Action { private final String name; + private final Writeable.Reader responseReader; /** * @param name The name of the action, must be unique across actions. + * @deprecated Pass a {@link Writeable.Reader} with {@link #Action(String, Writeable.Reader)} */ + @Deprecated protected Action(String name) { + this(name, null); + } + + /** + * @param name The name of the action, must be unique across actions. + * @param responseReader A reader for the response type + */ + public Action(String name, Writeable.Reader responseReader) { this.name = name; + this.responseReader = responseReader; } /** @@ -44,23 +56,11 @@ public String name() { return this.name; } - /** - * Creates a new response instance. 
- * @deprecated Implement {@link #getResponseReader()} instead and make this method throw an - * {@link UnsupportedOperationException} - */ - @Deprecated - public abstract Response newResponse(); - /** * Get a reader that can create a new instance of the class from a {@link org.elasticsearch.common.io.stream.StreamInput} */ public Writeable.Reader getResponseReader() { - return in -> { - Response response = newResponse(); - response.readFrom(in); - return response; - }; + return responseReader; } /**
diff --git a/server/src/main/java/org/elasticsearch/action/Action2.java b/server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java similarity index 53% rename from server/src/main/java/org/elasticsearch/action/Action2.java rename to server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java index 791e0a2e42274..c7eecfc35d738 100644 --- a/server/src/main/java/org/elasticsearch/action/Action2.java +++ b/server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java @@ -19,29 +19,33 @@ package org.elasticsearch.action; - import org.elasticsearch.common.io.stream.Writeable; /** - * An action for which the response class implements {@link org.elasticsearch.common.io.stream.Writeable}. + * An action for which the response type implements {@link org.elasticsearch.common.io.stream.Streamable}. + * @deprecated Use {@link Action} directly and provide a {@link Writeable.Reader} */ -public class Action2 extends Action { - private final Writeable.Reader responseReader; +@Deprecated +public abstract class StreamableResponseAction extends Action { - public Action2(String name, Writeable.Reader responseReader) { + protected StreamableResponseAction(String name) { super(name); - this.responseReader = responseReader; - } - - @Override - public Response newResponse() { - throw new UnsupportedOperationException(); - } /** - * Get a reader that can create a new instance of the class from a {@link org.elasticsearch.common.io.stream.StreamInput} + * Creates a new response instance. 
+ * @deprecated Implement {@link #getResponseReader()} instead and make this method throw an + * {@link UnsupportedOperationException} */ - public Writeable.Reader getResponseReader() { - return responseReader; + @Deprecated + public abstract Response newResponse(); + + @Override + public final Writeable.Reader getResponseReader() { + return in -> { + Response response = newResponse(); + response.readFrom(in); + return response; + }; } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java index 19d5378b305ed..b4b348ae97ee0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.allocation; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for explaining shard allocation for a shard in the cluster */ -public class ClusterAllocationExplainAction extends Action { +public class ClusterAllocationExplainAction extends StreamableResponseAction { public static final ClusterAllocationExplainAction INSTANCE = new ClusterAllocationExplainAction(); public static final String NAME = "cluster:monitor/allocation/explain"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java index 6fdd324fa47c6..20f1e3c50443e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java @@ -29,11 +29,6 @@ private AddVotingConfigExclusionsAction() { super(NAME); } - @Override - public AddVotingConfigExclusionsResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Reader getResponseReader() { return AddVotingConfigExclusionsResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java index 49b578f48adf8..6cafcb7653f91 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java @@ -29,11 +29,6 @@ private ClearVotingConfigExclusionsAction() { super(NAME); } - @Override - public ClearVotingConfigExclusionsResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Reader getResponseReader() { return ClearVotingConfigExclusionsResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java index 0cd148ee231e0..ceb2a145fb64a 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.health; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ClusterHealthAction extends Action { +public class ClusterHealthAction extends StreamableResponseAction { public static final ClusterHealthAction INSTANCE = new ClusterHealthAction(); public static final String NAME = "cluster:monitor/health"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java index 4ea7ee5bc3bbe..317fa984163e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class NodesHotThreadsAction extends Action { +public class NodesHotThreadsAction extends StreamableResponseAction { public static final NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction(); public static final String NAME = "cluster:monitor/nodes/hot_threads"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java index edc5ed7e83f0f..b860f07c8ff99 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class NodesInfoAction extends Action { +public class NodesInfoAction extends StreamableResponseAction { public static final NodesInfoAction INSTANCE = new NodesInfoAction(); public static final String NAME = "cluster:monitor/nodes/info"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java index 19e8fc1929c5d..0a0c8a74fe9d1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java @@ -19,10 +19,10 @@ package org.elasticsearch.action.admin.cluster.node.reload; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; public class NodesReloadSecureSettingsAction - extends Action { + extends StreamableResponseAction { public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction(); public static final String NAME = "cluster:admin/nodes/reload_secure_settings"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java index bc8c81ef1e0f5..1febe1b4872f3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class NodesStatsAction extends Action { +public class NodesStatsAction extends StreamableResponseAction { public static final NodesStatsAction INSTANCE = new NodesStatsAction(); public static final String NAME = "cluster:monitor/nodes/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java index 23cb69cf807f7..39532d18519c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java @@ -34,11 +34,6 @@ private CancelTasksAction() { super(NAME); } - @Override - public CancelTasksResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return CancelTasksResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java index 4e88963de4c98..cdb5bbc39068a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.node.tasks.get; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for retrieving a list of currently running tasks */ -public class GetTaskAction extends Action { +public class GetTaskAction extends StreamableResponseAction { public static final String TASKS_ORIGIN = "tasks"; public static final GetTaskAction INSTANCE = new GetTaskAction(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java index abba798c83cc1..ffec4aed0569a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java @@ -34,11 +34,6 @@ private ListTasksAction() { super(NAME); } - @Override - public ListTasksResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return ListTasksResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java index 6bc6dce54945a..34d864e1476fb 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.node.usage; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class NodesUsageAction extends Action { +public class NodesUsageAction extends StreamableResponseAction { public static final NodesUsageAction INSTANCE = new NodesUsageAction(); public static final String NAME = "cluster:monitor/nodes/usage"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java index 3b998049daaaa..a3f2b2d406a9e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.remote; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public final class RemoteInfoAction extends Action { +public final class RemoteInfoAction extends StreamableResponseAction { public static final String NAME = "cluster:monitor/remote/info"; public static final RemoteInfoAction INSTANCE = new RemoteInfoAction(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java index 205521a6b6f48..f4b5cb2559f6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java @@ -35,11 +35,6 @@ private DeleteRepositoryAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java index eac4d971b60a1..7987f26c01b92 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java @@ -34,11 +34,6 @@ private GetRepositoriesAction() { super(NAME); } - @Override - public GetRepositoriesResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return GetRepositoriesResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java index ea021f6ba674b..1e070c5ed98d1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java @@ -35,11 +35,6 @@ private PutRepositoryAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java index 743b0a57f3aa3..67580e6bf8126 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.repositories.verify; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Unregister repository action */ -public class VerifyRepositoryAction extends Action { +public class VerifyRepositoryAction extends StreamableResponseAction { public static final VerifyRepositoryAction INSTANCE = new VerifyRepositoryAction(); public static final String NAME = "cluster:admin/repository/verify"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java index c341618729cd0..e92a136f838c0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java @@ -31,11 +31,6 @@ private ClusterRerouteAction() { super(NAME); } - @Override - public ClusterRerouteResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return ClusterRerouteResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java index c23a05338f299..9c359b919eee1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java @@ -31,11 +31,6 @@ private ClusterUpdateSettingsAction() { super(NAME); } - @Override - public ClusterUpdateSettingsResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return ClusterUpdateSettingsResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java index 869aecf095431..cb323c6494df5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java @@ -31,11 +31,6 @@ private ClusterSearchShardsAction() { super(NAME); } - 
@Override - public ClusterSearchShardsResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return ClusterSearchShardsResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java index d37132a1d81e7..988ac070fb75d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Create snapshot action */ -public class CreateSnapshotAction extends Action { +public class CreateSnapshotAction extends StreamableResponseAction { public static final CreateSnapshotAction INSTANCE = new CreateSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/create"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java index 1413f0df0686a..8cf746e846056 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java @@ -35,11 +35,6 @@ private DeleteSnapshotAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java index b77130f1875be..9b4ca545044a3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java @@ -34,11 +34,6 @@ private GetSnapshotsAction() { super(NAME); } - @Override - public GetSnapshotsResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return GetSnapshotsResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java index e633ce43e66cd..8cf8d461f4eb3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Restore snapshot action */ -public class RestoreSnapshotAction 
extends Action { +public class RestoreSnapshotAction extends StreamableResponseAction { public static final RestoreSnapshotAction INSTANCE = new RestoreSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/restore"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java index ea28d26b40ffa..09ca89f5d521e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java @@ -19,12 +19,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Snapshots status action */ -public class SnapshotsStatusAction extends Action { +public class SnapshotsStatusAction extends StreamableResponseAction { public static final SnapshotsStatusAction INSTANCE = new SnapshotsStatusAction(); public static final String NAME = "cluster:admin/snapshot/status"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java index f48df06d53c6f..0087cf8d4f5ea 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.state; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ClusterStateAction extends Action { +public class ClusterStateAction extends StreamableResponseAction { public static final ClusterStateAction INSTANCE = new ClusterStateAction(); public static final String NAME = "cluster:monitor/state"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java index 049ce62d9df40..2e1aa0d023d7c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ClusterStatsAction extends Action { +public class ClusterStatsAction extends StreamableResponseAction { public static final ClusterStatsAction INSTANCE = new ClusterStatsAction(); public static final String NAME = "cluster:monitor/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java index 1e07090ea01c4..307440adfeee8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java @@ -32,11 +32,6 @@ private DeleteStoredScriptAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new 
UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java index e8015a4487496..6b4c229ebdd38 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.storedscripts; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class GetStoredScriptAction extends Action { +public class GetStoredScriptAction extends StreamableResponseAction { public static final GetStoredScriptAction INSTANCE = new GetStoredScriptAction(); public static final String NAME = "cluster:admin/script/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java index 7168667af0e1c..673299f275b58 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java @@ -33,11 +33,6 @@ private PutStoredScriptAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java index 296c65146a03c..adfe1054038a0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.cluster.tasks; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class PendingClusterTasksAction extends Action { +public class PendingClusterTasksAction extends StreamableResponseAction { public static final PendingClusterTasksAction INSTANCE = new PendingClusterTasksAction(); public static final String NAME = "cluster:monitor/task"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java index 38b1844e73a14..a0582ae704b62 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java @@ -32,11 +32,6 @@ private IndicesAliasesAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return 
AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java index db423c2aaaa07..9f88d9a16c190 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.indices.alias.get; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class GetAliasesAction extends Action { +public class GetAliasesAction extends StreamableResponseAction { public static final GetAliasesAction INSTANCE = new GetAliasesAction(); public static final String NAME = "indices:admin/aliases/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 27b623776fa9c..cbd24a1fa883f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -59,11 +59,6 @@ public Writeable.Reader getResponseReader() { return Response::new; } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - /** * A request to analyze a text associated with a specific index. Allow to provide * the actual analyzer name to perform the analysis with. diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java index e5bdd53bdc7a7..3dd135d9655f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ClearIndicesCacheAction extends Action { +public class ClearIndicesCacheAction extends StreamableResponseAction { public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); public static final String NAME = "indices:admin/cache/clear"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java index 2d87b75273c25..d4c3bc4503d6c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java @@ -31,11 +31,6 @@ private CloseIndexAction() { super(NAME); } - @Override - public CloseIndexResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return CloseIndexResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java index 
fd55ec4bf0bf1..620e1cc9f4aeb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java @@ -31,11 +31,6 @@ private CreateIndexAction() { super(NAME); } - @Override - public CreateIndexResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return CreateIndexResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java index 9bb7b2d880a2b..0e45a77e72802 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java @@ -32,11 +32,6 @@ private DeleteIndexAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java index 60d5b43a6c189..721eae72ee02b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class FlushAction extends Action { +public class FlushAction extends StreamableResponseAction { public static final FlushAction INSTANCE = new FlushAction(); public static final String NAME = "indices:admin/flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java index 5005cd2ec0878..b3af28dd23975 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java @@ -19,10 +19,10 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class SyncedFlushAction extends Action { +public class SyncedFlushAction extends StreamableResponseAction { public static final SyncedFlushAction INSTANCE = new SyncedFlushAction(); public static final String NAME = "indices:admin/synced_flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java index 51095435343cb..6b9866afa7a2b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.action.admin.indices.forcemerge; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; 
-public class ForceMergeAction extends Action {
+public class ForceMergeAction extends StreamableResponseAction {
 
     public static final ForceMergeAction INSTANCE = new ForceMergeAction();
     public static final String NAME = "indices:admin/forcemerge";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java
index 86396f246a414..e2c663cb7da36 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetIndexAction extends Action {
+public class GetIndexAction extends StreamableResponseAction {
 
     public static final GetIndexAction INSTANCE = new GetIndexAction();
     public static final String NAME = "indices:admin/get";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java
index d372d8cf93f30..97cb7b3943d95 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java
@@ -31,11 +31,6 @@ private GetFieldMappingsAction() {
         super(NAME);
     }
 
-    @Override
-    public GetFieldMappingsResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return GetFieldMappingsResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java
index 8bae685fff5dd..6b64cdec306e2 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.mapping.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetMappingsAction extends Action {
+public class GetMappingsAction extends StreamableResponseAction {
 
     public static final GetMappingsAction INSTANCE = new GetMappingsAction();
     public static final String NAME = "indices:admin/mappings/get";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
index 1272846c70c26..919961431d6f9 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
-import org.elasticsearch.action.Action2;
 import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -65,7 +64,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAction {
 
     private static final String ACTION_NAME = GetFieldMappingsAction.NAME + "[index]";
-    public static final Action ACTION_INSTANCE = new Action2<>(ACTION_NAME, GetFieldMappingsResponse::new);
+    public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME, GetFieldMappingsResponse::new);
 
     protected final ClusterService clusterService;
     private final IndicesService indicesService;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java
index a4cc37f552ee0..117b378bbe5cf 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java
@@ -32,11 +32,6 @@ private PutMappingAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java
index 6d53a3a72d3e9..95cb4d4a78ea2 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java
@@ -31,11 +31,6 @@ private OpenIndexAction() {
         super(NAME);
     }
 
-    @Override
-    public OpenIndexResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return OpenIndexResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java
index bfe261b58843a..cea3db5249581 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java
@@ -19,12 +19,12 @@
 
 package org.elasticsearch.action.admin.indices.recovery;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Recovery information action
  */
-public class RecoveryAction extends Action {
+public class RecoveryAction extends StreamableResponseAction {
 
     public static final RecoveryAction INSTANCE = new RecoveryAction();
     public static final String NAME = "indices:monitor/recovery";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java
index b0dac076b2f49..63815fe2dcf3b 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.refresh;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class RefreshAction extends Action {
+public class RefreshAction extends StreamableResponseAction {
 
     public static final RefreshAction INSTANCE = new RefreshAction();
     public static final String NAME = "indices:admin/refresh";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java
index 091dc1e09e1a9..e7cc53dbf44dc 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java
@@ -31,11 +31,6 @@ private RolloverAction() {
         super(NAME);
     }
 
-    @Override
-    public RolloverResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return RolloverResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java
index 669c31d6b087d..b236bf0cc227c 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.segments;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class IndicesSegmentsAction extends Action {
+public class IndicesSegmentsAction extends StreamableResponseAction {
 
     public static final IndicesSegmentsAction INSTANCE = new IndicesSegmentsAction();
     public static final String NAME = "indices:monitor/segments";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java
index e4149aaf8f15f..4aa4fe57814af 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.settings.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetSettingsAction extends Action {
+public class GetSettingsAction extends StreamableResponseAction {
 
     public static final GetSettingsAction INSTANCE = new GetSettingsAction();
     public static final String NAME = "indices:monitor/settings/get";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java
index acf07668ad344..af7750cff793c 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java
@@ -32,11 +32,6 @@ private UpdateSettingsAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java
index d3ce0077d5e94..3016633caeb7b 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.indices.shards;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Action for {@link TransportIndicesShardStoresAction}
@@ -28,7 +28,7 @@
  * Shard store information reports which nodes hold shard copies, how recent they are
  * and any exceptions on opening the shard index or from previous engine failures
  */
-public class IndicesShardStoresAction extends Action {
+public class IndicesShardStoresAction extends StreamableResponseAction {
 
     public static final IndicesShardStoresAction INSTANCE = new IndicesShardStoresAction();
     public static final String NAME = "indices:monitor/shard_stores";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java
index 6169d4a200108..e36adb3def639 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java
@@ -31,11 +31,6 @@ private ResizeAction() {
         super(NAME);
     }
 
-    @Override
-    public ResizeResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return ResizeResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java
index 3aef52f1636e4..7c8c58e4864e4 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java
@@ -31,11 +31,6 @@ private ShrinkAction() {
         super(NAME);
     }
 
-    @Override
-    public ResizeResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return ResizeResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java
index 6765279eb6b87..5b91f3f31e5e2 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.stats;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class IndicesStatsAction extends Action {
+public class IndicesStatsAction extends StreamableResponseAction {
 
     public static final IndicesStatsAction INSTANCE = new IndicesStatsAction();
     public static final String NAME = "indices:monitor/stats";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java
index 2f3dd877f8a9d..c4a5bdc43819f 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java
@@ -32,11 +32,6 @@ private DeleteIndexTemplateAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java
index ac00b80079ca1..3202fa3d6c808 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java
@@ -18,9 +18,9 @@
  */
 package org.elasticsearch.action.admin.indices.template.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetIndexTemplatesAction extends Action {
+public class GetIndexTemplatesAction extends StreamableResponseAction {
 
     public static final GetIndexTemplatesAction INSTANCE = new GetIndexTemplatesAction();
     public static final String NAME = "indices:admin/template/get";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java
index 9af753bbfdcb8..88c3b9bb09d6c 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java
@@ -32,11 +32,6 @@ private PutIndexTemplateAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java
index 57506b615d692..6545008486a97 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.upgrade.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class UpgradeStatusAction extends Action {
+public class UpgradeStatusAction extends StreamableResponseAction {
 
     public static final UpgradeStatusAction INSTANCE = new UpgradeStatusAction();
     public static final String NAME = "indices:monitor/upgrade";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java
index 7ec83930e44bf..ea5f511741a99 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java
@@ -19,12 +19,12 @@
 
 package org.elasticsearch.action.admin.indices.upgrade.post;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Upgrade index/indices action.
 */
-public class UpgradeAction extends Action {
+public class UpgradeAction extends StreamableResponseAction {
 
     public static final UpgradeAction INSTANCE = new UpgradeAction();
     public static final String NAME = "indices:admin/upgrade";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
index f830ebff3dd82..324e796d214db 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java
@@ -32,11 +32,6 @@ private UpgradeSettingsAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java
index 93151dd8a2ba9..2b0b21e28bcb4 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.admin.indices.validate.query;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class ValidateQueryAction extends Action {
+public class ValidateQueryAction extends StreamableResponseAction {
 
     public static final ValidateQueryAction INSTANCE = new ValidateQueryAction();
     public static final String NAME = "indices:admin/validate/query";
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java
index f835b57a38b4c..ee5f32e1cfa8a 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java
@@ -19,11 +19,11 @@
 
 package org.elasticsearch.action.bulk;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.transport.TransportRequestOptions;
 
-public class BulkAction extends Action {
+public class BulkAction extends StreamableResponseAction {
 
     public static final BulkAction INSTANCE = new BulkAction();
     public static final String NAME = "indices:data/write/bulk";
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index 771010d673d67..d66c3ab0a96c9 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.action.ActionRunnable;
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.delete.DeleteResponse;
 import org.elasticsearch.action.index.IndexRequest;
@@ -78,7 +79,7 @@ public class TransportShardBulkAction extends TransportWriteAction {
 
     public static final String ACTION_NAME = BulkAction.NAME + "[s]";
 
-    public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME) {
+    public static final Action ACTION_INSTANCE = new StreamableResponseAction<>(ACTION_NAME) {
         @Override
         public BulkShardResponse newResponse() {
             return new BulkShardResponse();
diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java
index d78b6f60bffc3..1d43fb102a6d3 100644
--- a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java
+++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.delete;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class DeleteAction extends Action {
+public class DeleteAction extends StreamableResponseAction {
 
     public static final DeleteAction INSTANCE = new DeleteAction();
     public static final String NAME = "indices:data/write/delete";
diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java
index ba5618ce7de21..2546de35e5445 100644
--- a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java
+++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java
@@ -34,11 +34,6 @@ private ExplainAction() {
         super(NAME);
     }
 
-    @Override
-    public ExplainResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return ExplainResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java
index 39c6ecce308e0..0cec94839c2a8 100644
--- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.fieldcaps;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class FieldCapabilitiesAction extends Action {
+public class FieldCapabilitiesAction extends StreamableResponseAction {
 
     public static final FieldCapabilitiesAction INSTANCE = new FieldCapabilitiesAction();
     public static final String NAME = "indices:data/read/field_caps";
diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java
index e7eadf9af0eb9..c043fc42e0633 100644
--- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.action.fieldcaps;
 
 import org.elasticsearch.action.Action;
-import org.elasticsearch.action.Action2;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
 import org.elasticsearch.cluster.ClusterState;
@@ -50,7 +49,7 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA
 
     private static final String ACTION_NAME = FieldCapabilitiesAction.NAME + "[index]";
     public static final Action ACTION_INSTANCE =
-        new Action2<>(ACTION_NAME, FieldCapabilitiesIndexResponse::new);
+        new Action<>(ACTION_NAME, FieldCapabilitiesIndexResponse::new);
 
     private final IndicesService indicesService;
diff --git a/server/src/main/java/org/elasticsearch/action/get/GetAction.java b/server/src/main/java/org/elasticsearch/action/get/GetAction.java
index 05d1b6c5a4c02..383029090c509 100644
--- a/server/src/main/java/org/elasticsearch/action/get/GetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/GetAction.java
@@ -31,11 +31,6 @@ private GetAction() {
         super(NAME);
     }
 
-    @Override
-    public GetResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return GetResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java
index 9b69e33239b82..cfb90e1cceeac 100644
--- a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.get;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class MultiGetAction extends Action {
+public class MultiGetAction extends StreamableResponseAction {
 
     public static final MultiGetAction INSTANCE = new MultiGetAction();
     public static final String NAME = "indices:data/read/mget";
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
index a639795946e21..78ce6fda6c4b6 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
@@ -21,7 +21,6 @@
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.Action;
-import org.elasticsearch.action.Action2;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -42,7 +41,7 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction {
 
     private static final String ACTION_NAME = MultiGetAction.NAME + "[shard]";
-    public static final Action ACTION_INSTANCE = new Action2<>(ACTION_NAME, MultiGetShardResponse::new);
+    public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME, MultiGetShardResponse::new);
 
     private final IndicesService indicesService;
diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java
index 4f3e6068a2a23..6515f75f083a5 100644
--- a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.index;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class IndexAction extends Action {
+public class IndexAction extends StreamableResponseAction {
 
     public static final IndexAction INSTANCE = new IndexAction();
     public static final String NAME = "indices:data/write/index";
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java
index c2842340ba44e..111974915547d 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java
@@ -32,11 +32,6 @@ public DeletePipelineAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java
index b2305227ac67f..ac699ea8c3232 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.ingest;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetPipelineAction extends Action {
+public class GetPipelineAction extends StreamableResponseAction {
 
     public static final GetPipelineAction INSTANCE = new GetPipelineAction();
     public static final String NAME = "cluster:admin/ingest/pipeline/get";
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java
index b614c847b4f76..06038141e90dc 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java
@@ -32,11 +32,6 @@ public PutPipelineAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java
index afeb4e01fb0ff..6d05c9fb690c1 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.ingest;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class SimulatePipelineAction extends Action {
+public class SimulatePipelineAction extends StreamableResponseAction {
 
     public static final SimulatePipelineAction INSTANCE = new SimulatePipelineAction();
     public static final String NAME = "cluster:admin/ingest/pipeline/simulate";
diff --git a/server/src/main/java/org/elasticsearch/action/main/MainAction.java b/server/src/main/java/org/elasticsearch/action/main/MainAction.java
index 831ddd0983fad..9d38e913f7732 100644
--- a/server/src/main/java/org/elasticsearch/action/main/MainAction.java
+++ b/server/src/main/java/org/elasticsearch/action/main/MainAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.main;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class MainAction extends Action {
+public class MainAction extends StreamableResponseAction {
 
     public static final String NAME = "cluster:monitor/main";
     public static final MainAction INSTANCE = new MainAction();
diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java
index 660ed1ee17860..101d002d69842 100644
--- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.search;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class ClearScrollAction extends Action {
+public class ClearScrollAction extends StreamableResponseAction {
 
     public static final ClearScrollAction INSTANCE = new ClearScrollAction();
     public static final String NAME = "indices:data/read/scroll/clear";
diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java
index 9017a7b94ecb4..fe1efc1e2d05a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java
@@ -31,11 +31,6 @@ private MultiSearchAction() {
         super(NAME);
     }
 
-    @Override
-    public MultiSearchResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return MultiSearchResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java
index d665595e8d34d..dfa8d66098c5a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java
@@ -31,11 +31,6 @@ private SearchAction() {
         super(NAME);
     }
 
-    @Override
-    public SearchResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return SearchResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java
index 0b4adfc1ba55c..6c5cf0e46f073 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java
@@ -31,11 +31,6 @@ private SearchScrollAction() {
         super(NAME);
     }
 
-    @Override
-    public SearchResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return SearchResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
index a894b3480f10f..e3fa0d17cf111 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.termvectors;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class MultiTermVectorsAction extends Action {
+public class MultiTermVectorsAction extends StreamableResponseAction {
 
     public static final MultiTermVectorsAction INSTANCE = new MultiTermVectorsAction();
     public static final String NAME = "indices:data/read/mtv";
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
index 9b223eed3a3c8..12c11bff8325f 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java
@@ -31,11 +31,6 @@ private TermVectorsAction() {
         super(NAME);
     }
 
-    @Override
-    public TermVectorsResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return TermVectorsResponse::new;
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
index aca955748df27..f3d63378cfc75 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
@@ -21,6 +21,7 @@
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -44,7 +45,7 @@ public class TransportShardMultiTermsVectorAction extends
     private final IndicesService indicesService;
 
     private static final String ACTION_NAME = MultiTermVectorsAction.NAME + "[shard]";
-    public static final Action ACTION_INSTANCE = new Action<>(ACTION_NAME) {
+    public static final Action ACTION_INSTANCE = new StreamableResponseAction<>(ACTION_NAME) {
         @Override
         public MultiTermVectorsShardResponse newResponse() {
             return new MultiTermVectorsShardResponse();
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java
index 1c8c80b61cef1..5835f7c88dbe1 100644
--- a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.action.update;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class UpdateAction extends Action {
+public class UpdateAction extends StreamableResponseAction {
 
     public static final UpdateAction INSTANCE = new UpdateAction();
     public static final String NAME = "indices:data/write/update";
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java
index c7cfe28e2c0be..97c4708ec1eba 100644
--- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java
+++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class DeleteByQueryAction extends Action {
+public class DeleteByQueryAction extends StreamableResponseAction {
 
     public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction();
     public static final String NAME = "indices:data/write/delete/byquery";
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java
index 86d0c96602a3c..945b080ca239b 100644
--- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java
+++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class ReindexAction extends Action {
+public class ReindexAction extends StreamableResponseAction {
 
     public static final ReindexAction INSTANCE = new ReindexAction();
     public static final String NAME = "indices:data/write/reindex";
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java
index 250a267ea255d..d758a9c411331 100644
--- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java
+++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class UpdateByQueryAction extends Action {
+public class UpdateByQueryAction extends StreamableResponseAction {
 
     public static final UpdateByQueryAction INSTANCE = new UpdateByQueryAction();
     public static final String NAME = "indices:data/write/update/byquery";
diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
index 3f292a7c8d156..a8805c1c5cdfe 100644
--- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
+++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
@@ -19,10 +19,10 @@
 
 package org.elasticsearch.index.seqno;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.single.shard.SingleShardRequest;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -123,7 +123,7 @@ protected boolean resolveIndex(final T request) {
 
     }
 
-    public static class Add extends Action {
+    public static class Add extends StreamableResponseAction {
 
         public static final Add INSTANCE = new Add();
         public static final String ACTION_NAME = "indices:admin/seq_no/add_retention_lease";
@@ -176,7 +176,7 @@ public Response newResponse() {
 
     }
 
-    public static class Renew extends Action {
+    public static class Renew extends StreamableResponseAction {
 
         public static final Renew INSTANCE = new Renew();
         public static final String ACTION_NAME = "indices:admin/seq_no/renew_retention_lease";
@@ -222,7 +222,7 @@ public Response newResponse() {
 
     }
 
-    public static class Remove extends Action {
+    public static class Remove extends StreamableResponseAction {
 
         public static final Remove INSTANCE = new Remove();
         public static final String ACTION_NAME = "indices:admin/seq_no/remove_retention_lease";
diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java
index d34374bb5202a..b43a60e3a1373 100644
--- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java
@@ -18,9 +18,9 @@
  */
 package org.elasticsearch.persistent;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -47,7 +47,7 @@
 * Action that is used by executor node to indicate that the persistent action finished or failed on the node and needs to be
 * removed from the cluster state in case of successful completion or restarted on some other node in case of failure.
 */
-public class CompletionPersistentTaskAction extends Action {
+public class CompletionPersistentTaskAction extends StreamableResponseAction {
 
     public static final CompletionPersistentTaskAction INSTANCE = new CompletionPersistentTaskAction();
     public static final String NAME = "cluster:admin/persistent/completion";
diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
index 8f122dab0c514..1ca3cc3dc09d0 100644
--- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java
@@ -18,9 +18,9 @@
 */
 package org.elasticsearch.persistent;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -41,7 +41,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class RemovePersistentTaskAction extends Action {
+public class RemovePersistentTaskAction extends StreamableResponseAction {
 
     public static final RemovePersistentTaskAction INSTANCE = new RemovePersistentTaskAction();
     public static final String NAME = "cluster:admin/persistent/remove";
diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
index 1b0c973a3ce40..443c2582aef2a 100644
--- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java
@@ -18,9 +18,9 @@
 */
 package org.elasticsearch.persistent;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -47,7 +47,7 @@
 /**
 * This action can be used to add the record for the persistent action to the cluster state.
 */
-public class StartPersistentTaskAction extends Action {
+public class StartPersistentTaskAction extends StreamableResponseAction {
 
     public static final StartPersistentTaskAction INSTANCE = new StartPersistentTaskAction();
     public static final String NAME = "cluster:admin/persistent/start";
diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
index 22c7f74e3c8e7..5f8eb38a7c52f 100644
--- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java
@@ -18,9 +18,9 @@
 */
 package org.elasticsearch.persistent;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
@@ -43,7 +43,7 @@
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 
-public class UpdatePersistentTaskStatusAction extends Action {
+public class UpdatePersistentTaskStatusAction extends StreamableResponseAction {
 
     public static final UpdatePersistentTaskStatusAction INSTANCE = new UpdatePersistentTaskStatusAction();
     public static final String NAME = "cluster:admin/persistent/update_status";
diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
index ac07c18b04189..4cd74b4e51cbb 100644
--- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
@@ -88,7 +88,7 @@ protected FakeTransportAction(String actionName, ActionFilters actionFilters, Ta
         protected void doExecute(Task task, FakeRequest request, ActionListener listener) {
         }
     }
-    class FakeAction extends Action {
+    class FakeAction extends StreamableResponseAction {
         protected FakeAction() {
             super("fake");
         }
diff --git a/server/src/test/java/org/elasticsearch/action/ActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java
index a7dca3f098d05..46a7f97653e3b 100644
--- a/server/src/test/java/org/elasticsearch/action/ActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java
@@ -24,7 +24,7 @@
 public class ActionTests extends ESTestCase {
 
     public void testEquals() {
-        class FakeAction extends Action {
+        class FakeAction extends StreamableResponseAction {
            protected FakeAction(String name) {
                super(name);
            }
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index c9b65714e62b9..4ece1a75c288b 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.nodes.BaseNodeRequest;
@@ -330,7 +331,7 @@ protected NodeResponse nodeOperation(NodeRequest request, Task task) {
         }
     }
 
-    public static class TestTaskAction extends Action {
+    public static class TestTaskAction extends StreamableResponseAction {
 
         public static final TestTaskAction INSTANCE = new TestTaskAction();
         public static final String NAME = "cluster:admin/tasks/test";
@@ -464,11 +465,6 @@ private UnblockTestTasksAction() {
         super(NAME);
     }
 
-    @Override
-    public UnblockTestTasksResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return UnblockTestTasksResponse::new;
diff --git a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
index b55253e208a91..e0dcf430785f3 100644
--- a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
+++ b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java
@@ -19,11 +19,11 @@
 
 package org.elasticsearch.indices.settings;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -63,7 +63,7 @@ public List> getSettings() {
         return Arrays.asList(INDEX_INTERNAL_SETTING, INDEX_PRIVATE_SETTING);
     }
 
-    public static class UpdateInternalOrPrivateAction extends Action {
+    public static class UpdateInternalOrPrivateAction extends StreamableResponseAction {
 
         public static final UpdateInternalOrPrivateAction INSTANCE = new UpdateInternalOrPrivateAction();
         private static final String NAME = "indices:admin/settings/update-internal-or-private-index";
diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
index 0b5c432d5978c..c0f7ac38e9aa3 100644
--- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
+++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java
@@ -404,11 +404,6 @@ private TestTaskAction() {
         super(NAME);
     }
 
-    @Override
-    public TestTasksResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return TestTasksResponse::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java
index 03495d34f2914..c9499e67a8932 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java
@@ -61,11 +61,6 @@ private ShardChangesAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java
index a85e5c50e840d..98f3e918503ac 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.xpack.ccr.action.bulk;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class BulkShardOperationsAction extends Action {
+public class BulkShardOperationsAction extends StreamableResponseAction {
 
     public static final BulkShardOperationsAction INSTANCE = new BulkShardOperationsAction();
     public static final String NAME = "indices:data/write/bulk_shard_operations[s]";
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java
index c4651e877fadd..5bdc1df755350 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java
@@ -29,11 +29,6 @@ private ClearCcrRestoreSessionAction() {
         super(NAME);
     }
 
-    @Override
-    public ClearCcrRestoreSessionResponse newResponse() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return ClearCcrRestoreSessionResponse::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
index 93d432fe93fed..b6f014811851b 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
@@ -29,11 +29,6 @@ private DeleteInternalCcrRepositoryAction() {
         super(NAME);
     }
 
-    @Override
-    public DeleteInternalCcrRepositoryResponse newResponse() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return DeleteInternalCcrRepositoryResponse::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java
index 37dfc84f46a01..64ac8c21881a2 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java
@@ -36,11 +36,6 @@ private GetCcrRestoreFileChunkAction() {
         super(NAME);
     }
 
-    @Override
-    public GetCcrRestoreFileChunkResponse newResponse() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return GetCcrRestoreFileChunkResponse::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java
index 91ec057ac4eb6..ed2d811b3b56b 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java
@@ -39,11 +39,6 @@ private PutCcrRestoreSessionAction() {
         super(NAME);
     }
 
-    @Override
-    public PutCcrRestoreSessionResponse newResponse() {
-        return new PutCcrRestoreSessionResponse();
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse::new;
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
index 397137ffb494a..40ba9ab34212d 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
@@ -29,11 +29,6 @@ private PutInternalCcrRepositoryAction() {
         super(NAME);
     }
 
-    @Override
-    public PutInternalCcrRepositoryResponse newResponse() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return PutInternalCcrRepositoryResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java
index 261f0318c985f..18430399572a4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java
@@ -18,11 +18,6 @@ private DeleteLicenseAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java
index be97ff59172fd..3b7b6d45c1c5b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.license;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetBasicStatusAction extends Action {
+public class GetBasicStatusAction extends StreamableResponseAction {
 
     public static final GetBasicStatusAction INSTANCE = new GetBasicStatusAction();
     public static final String NAME = "cluster:admin/xpack/license/basic_status";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java
index a6f19ea95b1e8..5db3c9cb335da 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.license;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetLicenseAction extends Action {
+public class GetLicenseAction extends StreamableResponseAction {
 
     public static final GetLicenseAction INSTANCE = new GetLicenseAction();
     public static final String NAME = "cluster:monitor/xpack/license/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java
index 69c14e1b6dc83..2b47f25e5baae 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.license;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class GetTrialStatusAction extends Action {
+public class GetTrialStatusAction extends StreamableResponseAction {
 
     public static final GetTrialStatusAction INSTANCE = new GetTrialStatusAction();
     public static final String NAME = "cluster:admin/xpack/license/trial_status";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java
index 446ff45501bd6..4b8ac56df9658 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java
@@ -17,11 +17,6 @@ private PostStartBasicAction() {
         super(NAME);
     }
 
-    @Override
-    public PostStartBasicResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return PostStartBasicResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java
index 609fa42caabc2..385c2cb0975f4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.license;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class PostStartTrialAction extends Action {
+public class PostStartTrialAction extends StreamableResponseAction {
 
     public static final PostStartTrialAction INSTANCE = new PostStartTrialAction();
     public static final String NAME = "cluster:admin/xpack/license/start_trial";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java
index 263b0a4c556e1..4106058e9f3df 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java
@@ -18,11 +18,6 @@ private PutLicenseAction() {
         super(NAME);
     }
 
-    @Override
-    public PutLicenseResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
    public Writeable.Reader getResponseReader() {
        return PutLicenseResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java
index f37df1ec820eb..e5b1bde1ef1de 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzerAction.java
@@ -5,9 +5,9 @@
 */
 package org.elasticsearch.xpack.core.action;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class ReloadAnalyzerAction extends Action {
+public class ReloadAnalyzerAction extends StreamableResponseAction {
 
     public static final ReloadAnalyzerAction INSTANCE = new ReloadAnalyzerAction();
     public static final String NAME = "indices:admin/reload_analyzers";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
index 45e78022379e7..582cc72ec9226 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java
@@ -236,11 +236,6 @@ private FreezeIndexAction() {
         super(NAME);
     }
 
-    @Override
-    public FreezeResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return FreezeResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java
index a47ce7a41c698..935e773ee1688 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java
@@ -5,10 +5,10 @@
 */
 package org.elasticsearch.xpack.core.action;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.protocol.xpack.XPackInfoResponse;
 
-public class XPackInfoAction extends Action {
+public class XPackInfoAction extends StreamableResponseAction {
 
     public static final String NAME = "cluster:monitor/xpack/info";
     public static final XPackInfoAction INSTANCE = new XPackInfoAction();
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java
index fd8b2853ee85a..0602f0dcd12c9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java
@@ -5,7 +5,7 @@
 */
 package org.elasticsearch.xpack.core.action;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.xpack.core.XPackField;
 
 import java.util.Arrays;
@@ -18,7 +18,7 @@
 * {@link XPackInfoAction} implementation iterates over the {@link #ALL} list of actions to form
 * the
complete info result. */ -public class XPackInfoFeatureAction extends Action { +public class XPackInfoFeatureAction extends StreamableResponseAction { private static final String BASE_NAME = "cluster:monitor/xpack/info/"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java index 40311a4e88457..68c7d05fbe370 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class XPackUsageAction extends Action { +public class XPackUsageAction extends StreamableResponseAction { public static final String NAME = "cluster:monitor/xpack/usage"; public static final XPackUsageAction INSTANCE = new XPackUsageAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index e1d473f7dcf9e..8f86e613b74d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.core.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.xpack.core.XPackField; import java.util.Arrays; @@ -18,7 +18,7 @@ * {@link XPackUsageAction} implementationn iterates over the {@link #ALL} list of actions to form * the complete usage result. 
*/ -public class XPackUsageFeatureAction extends Action { +public class XPackUsageFeatureAction extends StreamableResponseAction { private static final String BASE_NAME = "cluster:monitor/xpack/usage/"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index 5b2033443dcf7..494cf3ec42a9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -29,11 +29,6 @@ private CcrStatsAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index e23e24d166482..3be27c7ff3fc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -27,11 +27,6 @@ private DeleteAutoFollowPatternAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java index 122ae2b0a179d..2bf96e6702967 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -32,11 +32,6 @@ private FollowInfoAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java index 72353f405cfee..a1be7b8332529 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowStatsAction.java @@ -39,11 +39,6 @@ private FollowStatsAction() { super(NAME); } - @Override - public StatsResponses newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return StatsResponses::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java index d2a0b565496d6..5ad9b60427f23 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ForgetFollowerAction.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.core.ccr.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.ParseField; @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.Objects; -public class ForgetFollowerAction extends Action { +public class ForgetFollowerAction extends StreamableResponseAction { public static final String NAME = "indices:admin/xpack/ccr/forget_follower"; public static final ForgetFollowerAction INSTANCE = new ForgetFollowerAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java index cd37692da43a3..81ce4093f6327 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -30,11 +30,6 @@ private GetAutoFollowPatternAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index 748eb291f8572..690d37af0ada1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -26,11 +26,6 @@ private PauseFollowAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 716b999975cf7..db7c5c0512854 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -36,11 +36,6 @@ private PutAutoFollowPatternAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 
4d20e6d820de2..83eea15ffe85d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -36,11 +36,6 @@ private PutFollowAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index f22d4de0c8bd8..fecbbe31d2b41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -32,11 +32,6 @@ private ResumeFollowAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index e7804eb21be0d..832c6fcf1b24d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -29,11 +29,6 @@ private UnfollowAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java index 20f16a6a21fd4..cedd59e79e635 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java @@ -27,11 +27,6 @@ private DeleteDataFrameTransformAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java index d8bcf730012c6..47ef20ffd5003 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java @@ -40,11 +40,6 @@ private GetDataFrameTransformsAction() { super(NAME); } - @Override - public Response newResponse() { - 
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java index 1ee6024c6b311..8b17327c7d546 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java @@ -42,11 +42,6 @@ public GetDataFrameTransformsStatsAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java index 6108136a87b7f..a01d0a50ac262 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformAction.java @@ -44,11 +44,6 @@ private PreviewDataFrameTransformAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java index ff9af3ab85fa2..a368ae0434094 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java @@ -38,11 +38,6 @@ private PutDataFrameTransformAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java index e2128a0f7180b..e091e6c346d43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java @@ -31,11 +31,6 @@ private StartDataFrameTransformAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformTaskAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformTaskAction.java index fe453200fb2bc..8e7e393c6c641 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformTaskAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformTaskAction.java @@ -32,11 +32,6 @@ private StartDataFrameTransformTaskAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java index 0cbe7a45b636f..a3f79c5bc7499 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java @@ -43,11 +43,6 @@ private StopDataFrameTransformAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java index 54d260e32532f..aebb319c39f1d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.deprecation; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; @@ -39,7 +39,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class DeprecationInfoAction extends Action { +public class DeprecationInfoAction extends StreamableResponseAction { public static final DeprecationInfoAction INSTANCE = new DeprecationInfoAction(); public static final String NAME = "cluster:admin/xpack/deprecation/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/NodesDeprecationCheckAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/NodesDeprecationCheckAction.java index e27d70f972780..b7921ba9f4e20 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/NodesDeprecationCheckAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/NodesDeprecationCheckAction.java @@ -7,6 
+7,7 @@ package org.elasticsearch.xpack.core.deprecation; import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; @@ -23,7 +24,7 @@ * Runs deprecation checks on each node. Deprecation checks are performed locally so that filtered settings * can be accessed in the deprecation checks. */ -public class NodesDeprecationCheckAction extends Action { +public class NodesDeprecationCheckAction extends StreamableResponseAction { public static final NodesDeprecationCheckAction INSTANCE = new NodesDeprecationCheckAction(); public static final String NAME = "cluster:admin/xpack/deprecation/nodes/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java index e4fd8d0435106..31725b3c0c22f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.graph.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; -public class GraphExploreAction extends Action { +public class GraphExploreAction extends StreamableResponseAction { public static final GraphExploreAction INSTANCE = new GraphExploreAction(); public static final String NAME = "indices:data/read/xpack/graph/explore"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/DeleteLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/DeleteLifecycleAction.java index ba0b5598403f5..0416925f6e053 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/DeleteLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/DeleteLifecycleAction.java @@ -27,11 +27,6 @@ protected DeleteLifecycleAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/ExplainLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/ExplainLifecycleAction.java index 5acbbcb4967f3..b08ea01ac66cd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/ExplainLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/ExplainLifecycleAction.java @@ -6,10 +6,10 @@ package org.elasticsearch.xpack.core.indexlifecycle.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.xpack.core.indexlifecycle.ExplainLifecycleResponse; -public class ExplainLifecycleAction extends Action { +public class ExplainLifecycleAction extends StreamableResponseAction { public static final 
ExplainLifecycleAction INSTANCE = new ExplainLifecycleAction(); public static final String NAME = "indices:admin/ilm/explain"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetLifecycleAction.java index aaa295354a850..3d346384702cf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetLifecycleAction.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.core.indexlifecycle.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -23,7 +23,7 @@ import java.util.List; import java.util.Objects; -public class GetLifecycleAction extends Action { +public class GetLifecycleAction extends StreamableResponseAction { public static final GetLifecycleAction INSTANCE = new GetLifecycleAction(); public static final String NAME = "cluster:admin/ilm/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetStatusAction.java index 40765f0aa6650..7fe301ff65e38 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/GetStatusAction.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.core.indexlifecycle.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class GetStatusAction extends Action { +public class GetStatusAction extends StreamableResponseAction { public static final GetStatusAction INSTANCE = new GetStatusAction(); public static final String NAME = "cluster:admin/ilm/operation_mode/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/MoveToStepAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/MoveToStepAction.java index 536e8534c9001..d62ebd8bb9ba3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/MoveToStepAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/MoveToStepAction.java @@ -32,11 +32,6 @@ protected MoveToStepAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleAction.java index 5920b5d8ef596..a7ca96ba83a0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleAction.java @@ -31,11 +31,6 @@ protected PutLifecycleAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RemoveIndexLifecyclePolicyAction.java index 0e530baa57f0c..2de8e31d1714b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RemoveIndexLifecyclePolicyAction.java @@ -6,10 +6,10 @@ package org.elasticsearch.xpack.core.indexlifecycle.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.ParseField; @@ -24,7 +24,7 @@ import java.util.List; import java.util.Objects; -public class RemoveIndexLifecyclePolicyAction extends Action { +public class RemoveIndexLifecyclePolicyAction extends StreamableResponseAction { public static final RemoveIndexLifecyclePolicyAction INSTANCE = new RemoveIndexLifecyclePolicyAction(); public static final String NAME = "indices:admin/ilm/remove_policy"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RetryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RetryAction.java index 25cce3e5cf0e9..227c131d2499a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RetryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/RetryAction.java @@ -30,11 +30,6 @@ protected RetryAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StartILMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StartILMAction.java index a55d14e3f362c..b0910ee399e77 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StartILMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StartILMAction.java @@ -18,11 +18,6 @@ protected StartILMAction() { super(NAME); } - @Override 
- public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StopILMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StopILMAction.java index fc4a7a469e47b..7345b9652f63b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StopILMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/action/StopILMAction.java @@ -18,11 +18,6 @@ protected StopILMAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index 95ec597bb9cd8..de49c59783bf5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -35,11 +35,6 @@ private CloseJobAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index dac62ba09a5f9..cee7b3cf0f4e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -29,11 +29,6 @@ private DeleteCalendarAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 07c1575716c6b..a97ae6e69b8ed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -30,11 +30,6 @@ private DeleteCalendarEventAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 9a777b23a4bb8..b7090051a3d63 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -31,11 +31,6 @@ private DeleteDataFrameAnalyticsAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 4b7de0d912f47..04aa405c6f6c3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -32,11 +32,6 @@ private DeleteDatafeedAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java index 271d8ad5fa33c..f9a661827b693 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java @@ -5,11 +5,11 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteExpiredDataAction extends Action { +public class DeleteExpiredDataAction extends StreamableResponseAction { public static final DeleteExpiredDataAction INSTANCE = new DeleteExpiredDataAction(); public static final String NAME = "cluster:admin/xpack/ml/delete_expired_data"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 6f1a6e72613f7..564e6f59534b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -30,11 +30,6 @@ private DeleteFilterAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override 
public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java index 658cc8befeec6..055ae54702cc4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java @@ -29,11 +29,6 @@ private DeleteForecastAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 37dcb8450f682..24c403f9397c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -33,11 +33,6 @@ private DeleteJobAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java index 1a5181898a4b4..850543cf95092 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java @@ -29,11 +29,6 @@ private DeleteModelSnapshotAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java index eec58428d55cd..f8a8efebb1ce0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java @@ -5,11 +5,11 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Objects; -public class EvaluateDataFrameAction extends Action { +public class 
EvaluateDataFrameAction extends StreamableResponseAction { public static final EvaluateDataFrameAction INSTANCE = new EvaluateDataFrameAction(); public static final String NAME = "cluster:monitor/xpack/ml/data_frame/evaluate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index 4e081ad1ceeed..bd9e0107c4b69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -26,11 +26,6 @@ private FinalizeJobExecutionAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java index 5961a2305eaa1..beffcd08f2517 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.Version; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; @@ -31,7 +31,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class FindFileStructureAction extends Action { +public class FindFileStructureAction extends StreamableResponseAction { public static final FindFileStructureAction INSTANCE = new FindFileStructureAction(); public static final String NAME = "cluster:monitor/xpack/ml/findfilestructure"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index b73d2b502a49d..e54be42ba342c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -34,11 +34,6 @@ private FlushJobAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java index fb107579c6e66..16010047f1575 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java @@ -33,11 +33,6 @@ private ForecastJobAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java index f0be0f74bfd00..a75fc44e27005 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,7 +28,7 @@ import java.io.IOException; import java.util.Objects; -public class GetBucketsAction extends Action { +public class GetBucketsAction extends StreamableResponseAction { public static final GetBucketsAction INSTANCE = new GetBucketsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/buckets/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java index c2d25590513eb..342fab6a77ce3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetCalendarEventsAction extends Action { +public class GetCalendarEventsAction extends StreamableResponseAction { public static final GetCalendarEventsAction INSTANCE = new GetCalendarEventsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/events/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java index 3165e7f29cae1..d172f88116f32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; 
import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCalendarsAction extends Action { +public class GetCalendarsAction extends StreamableResponseAction { public static final GetCalendarsAction INSTANCE = new GetCalendarsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java index 3048f7b2bdda5..b35fdc321de36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCategoriesAction extends Action { +public class GetCategoriesAction extends StreamableResponseAction { public static final GetCategoriesAction INSTANCE = new GetCategoriesAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/categories/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java index 92233fbb27692..12772fd0962df 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -18,7 +18,7 @@ import java.io.IOException; import java.util.Collections; -public class GetDataFrameAnalyticsAction extends Action { +public class GetDataFrameAnalyticsAction extends StreamableResponseAction { public static final GetDataFrameAnalyticsAction INSTANCE = new GetDataFrameAnalyticsAction(); public static final String NAME = "cluster:admin/xpack/ml/data_frame/analytics/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsStatsAction.java 
index b14feaa8839f5..878b2c2d0dd21 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsStatsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsStatsAction.java
@@ -44,11 +44,6 @@ private GetDataFrameAnalyticsStatsAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java
index 950fa58af95c8..ec385ab8e4534 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java
@@ -5,8 +5,8 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class GetDatafeedsAction extends Action {
+public class GetDatafeedsAction extends StreamableResponseAction {
 
     public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java
index 39055501444f5..7d8996b3fc790 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java
@@ -5,8 +5,8 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -28,7 +28,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class GetDatafeedsStatsAction extends Action {
+public class GetDatafeedsStatsAction extends StreamableResponseAction {
 
     public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/stats/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java
index d63dfc39d732b..5ad21ccd0d165 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java
@@ -5,9 +5,9 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.xcontent.StatusToXContentObject;
 import org.elasticsearch.rest.RestStatus;
@@ -17,11 +17,10 @@
 import org.elasticsearch.xpack.core.action.util.QueryPage;
 import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
 
-
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 
-public class GetFiltersAction extends Action {
+public class GetFiltersAction extends StreamableResponseAction {
 
     public static final GetFiltersAction INSTANCE = new GetFiltersAction();
     public static final String NAME = "cluster:admin/xpack/ml/filters/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java
index a15336a97c09c..5c0e79e1fb0aa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java
@@ -5,10 +5,10 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -27,8 +27,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class GetInfluencersAction
-extends Action {
+public class GetInfluencersAction extends StreamableResponseAction {
 
     public static final GetInfluencersAction INSTANCE = new GetInfluencersAction();
     public static final String NAME = "cluster:monitor/xpack/ml/job/results/influencers/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java
index 98b1eb7a118f9..510996d01897a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java
@@ -5,8 +5,8 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class GetJobsAction extends Action {
+public class GetJobsAction extends StreamableResponseAction {
 
     public static final GetJobsAction INSTANCE = new GetJobsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/job/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
index 4726f95c80dbc..0767913dfdcb6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
@@ -57,11 +57,6 @@ private GetJobsStatsAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java
index 1fc93b68e1af7..edbb6f506a6b9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java
@@ -5,10 +5,10 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
@@ -28,7 +28,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class GetModelSnapshotsAction extends Action {
+public class GetModelSnapshotsAction extends StreamableResponseAction {
 
     public static final GetModelSnapshotsAction INSTANCE = new GetModelSnapshotsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/job/model_snapshots/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java
index da1bc74da4859..8f35319d2489e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java
@@ -6,10 +6,10 @@
 package org.elasticsearch.xpack.core.ml.action;
 
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -47,7 +47,7 @@
  * the interval.
  *
  */
-public class GetOverallBucketsAction extends Action {
+public class GetOverallBucketsAction extends StreamableResponseAction {
 
     public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java
index 46eeca8c70065..41d7447fc5b6d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java
@@ -5,10 +5,10 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -27,7 +27,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class GetRecordsAction extends Action {
+public class GetRecordsAction extends StreamableResponseAction {
 
     public static final GetRecordsAction INSTANCE = new GetRecordsAction();
     public static final String NAME = "cluster:monitor/xpack/ml/job/results/records/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java
index 8f681472ee8b3..33667fd27f894 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java
@@ -44,11 +44,6 @@ private IsolateDatafeedAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java
index df54d693f75ff..f3f35f98ba134 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java
@@ -25,11 +25,6 @@ private KillProcessAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java
index b0d635202c9fd..43db4a057844c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -21,7 +21,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class MlInfoAction extends Action {
+public class MlInfoAction extends StreamableResponseAction {
 
     public static final MlInfoAction INSTANCE = new MlInfoAction();
     public static final String NAME = "cluster:monitor/xpack/ml/info/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
index 55694894790fa..84b2457d96e25 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
@@ -44,11 +44,6 @@ private OpenJobAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java
index 3de585efc4649..6520e669e713f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java
@@ -25,11 +25,6 @@ private PersistJobAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java
index ae68b2fdb26ab..6c10561a40c48 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -29,7 +29,7 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 
-public class PostCalendarEventsAction extends Action {
+public class PostCalendarEventsAction extends StreamableResponseAction {
 
     public static final PostCalendarEventsAction INSTANCE = new PostCalendarEventsAction();
     public static final String NAME = "cluster:admin/xpack/ml/calendars/events/post";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java
index 0393f2c463932..2b814f64cb926 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java
@@ -33,11 +33,6 @@ private PostDataAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java
index af8a99b9828bb..b1a48aea5d8bc 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -25,7 +25,7 @@
 import java.io.InputStream;
 import java.util.Objects;
 
-public class PreviewDatafeedAction extends Action {
+public class PreviewDatafeedAction extends StreamableResponseAction {
 
     public static final PreviewDatafeedAction INSTANCE = new PreviewDatafeedAction();
     public static final String NAME = "cluster:admin/xpack/ml/datafeeds/preview";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java
index 0314103a3006b..7d08662e4d953 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -27,7 +27,7 @@
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 
-public class PutCalendarAction extends Action {
+public class PutCalendarAction extends StreamableResponseAction {
 
     public static final PutCalendarAction INSTANCE = new PutCalendarAction();
     public static final String NAME = "cluster:admin/xpack/ml/calendars/put";
"cluster:admin/xpack/ml/calendars/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index e447aa70109e7..6034257eb8c83 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Objects; -public class PutDataFrameAnalyticsAction extends Action { +public class PutDataFrameAnalyticsAction extends StreamableResponseAction { public static final PutDataFrameAnalyticsAction INSTANCE = new PutDataFrameAnalyticsAction(); public static final String NAME = "cluster:admin/xpack/ml/data_frame/analytics/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 1ac325b864536..fd12d19490774 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Objects; -public class PutDatafeedAction extends Action { +public class PutDatafeedAction extends StreamableResponseAction { public static final PutDatafeedAction INSTANCE = new PutDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 0ed5e8f22aadb..940f5afd24df5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -5,11 +5,11 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import 
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -25,7 +25,7 @@
 
 import java.util.Objects;
 
-public class PutFilterAction extends Action {
+public class PutFilterAction extends StreamableResponseAction {
 
     public static final PutFilterAction INSTANCE = new PutFilterAction();
     public static final String NAME = "cluster:admin/xpack/ml/filters/put";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java
index 2ae19c4f32250..f130cdfd5934f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java
@@ -5,9 +5,9 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -24,7 +24,7 @@
 import java.util.List;
 import java.util.Objects;
 
-public class PutJobAction extends Action {
+public class PutJobAction extends StreamableResponseAction {
 
     public static final PutJobAction INSTANCE = new PutJobAction();
     public static final String NAME = "cluster:admin/xpack/ml/job/put";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java
index cae1efb7e7a31..6905357eefe9f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java
@@ -5,9 +5,9 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -28,7 +28,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class RevertModelSnapshotAction extends Action {
+public class RevertModelSnapshotAction extends StreamableResponseAction {
 
     public static final RevertModelSnapshotAction INSTANCE = new RevertModelSnapshotAction();
     public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/revert";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java
index b3a595d6d112e..43737b12e5335 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java
@@ -31,11 +31,6 @@ private SetUpgradeModeAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java index d722198bdfae6..fc5a22f2c6e04 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java @@ -43,11 +43,6 @@ private StartDataFrameAnalyticsAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index a4a4f4bd40aff..8d229f0a60a75 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -52,11 +52,6 @@ private StartDatafeedAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java index 43d382147fd64..f7010a5b43f2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java @@ -42,11 +42,6 @@ private StopDataFrameAnalyticsAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 0021040c69801..8db85e4146ef4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -38,11 +38,6 @@ private StopDatafeedAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java index 
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java
@@ -5,10 +5,10 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -18,7 +18,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class UpdateCalendarJobAction extends Action {
+public class UpdateCalendarJobAction extends StreamableResponseAction {
 
     public static final UpdateCalendarJobAction INSTANCE = new UpdateCalendarJobAction();
     public static final String NAME = "cluster:admin/xpack/ml/calendars/jobs/update";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java
index 6ba34efa839b5..920c2861af537 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java
@@ -5,8 +5,8 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class UpdateDatafeedAction extends Action {
+public class UpdateDatafeedAction extends StreamableResponseAction {
 
     public static final UpdateDatafeedAction INSTANCE = new UpdateDatafeedAction();
     public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java
index 57b3d3457d736..e4869b4cb32bd 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java
@@ -5,10 +5,10 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
@@ -32,7 +32,7 @@
 import java.util.TreeSet;
 
 
-public class UpdateFilterAction extends Action {
+public class UpdateFilterAction extends StreamableResponseAction {
 
     public static final UpdateFilterAction INSTANCE = new UpdateFilterAction();
     public static final String NAME = "cluster:admin/xpack/ml/filters/update";
String NAME = "cluster:admin/xpack/ml/filters/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 3e1b0ea6b3c55..f317910e76fdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateJobAction extends Action { +public class UpdateJobAction extends StreamableResponseAction { public static final UpdateJobAction INSTANCE = new UpdateJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java index 1414719693f2c..b450280f5ee82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java @@ -5,11 +5,11 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateModelSnapshotAction extends Action { +public class UpdateModelSnapshotAction extends StreamableResponseAction { public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java index 6a8e1703ad1f2..922bcfbfe309d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java @@ -32,11 +32,6 @@ private UpdateProcessAction() { super(NAME); } - @Override - public Response newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return Response::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java 
index 84ea265fba649..4d6c61d8ebc73 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java
@@ -31,11 +31,6 @@ protected ValidateDetectorAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java
index 48da2603c70ae..80a7f9fd6cab1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java
@@ -32,11 +32,6 @@ protected ValidateJobConfigAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java
index 49fb085191e4e..3cb68b78177b9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java
@@ -5,9 +5,9 @@
  */
 package org.elasticsearch.xpack.core.monitoring.action;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
-public class MonitoringBulkAction extends Action {
+public class MonitoringBulkAction extends StreamableResponseAction {
 
     public static final MonitoringBulkAction INSTANCE = new MonitoringBulkAction();
     public static final String NAME = "cluster:admin/xpack/monitoring/bulk";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
index 92a1a07ded027..a17c5314b1c9d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java
@@ -38,11 +38,6 @@ private DeleteRollupJobAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
index f544c21a15c9a..fb2345847feb6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.rollup.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.ParseField;
@@ -17,7 +17,6 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -28,7 +28,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class GetRollupCapsAction extends Action {
+public class GetRollupCapsAction extends StreamableResponseAction {
 
     public static final GetRollupCapsAction INSTANCE = new GetRollupCapsAction();
     public static final String NAME = "cluster:monitor/xpack/rollup/get/caps";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
index 9dcd673c39fb2..60bc70247a9d7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
@@ -6,12 +6,12 @@
 package org.elasticsearch.xpack.core.rollup.action;
 
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.StreamableResponseAction;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.ParseField;
@@ -19,7 +19,6 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -31,7 +30,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class GetRollupIndexCapsAction extends Action {
+public class GetRollupIndexCapsAction extends StreamableResponseAction {
 
     public static final GetRollupIndexCapsAction INSTANCE = new GetRollupIndexCapsAction();
     public static final String NAME = "indices:data/read/xpack/rollup/get/index/caps";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java
index 913e544e74190..13bd4c231e1c5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java
@@ -47,11 +47,6 @@ private GetRollupJobsAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java
index d9900e53eff73..2fb2444ebfcb3 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java
@@ -35,11 +35,6 @@ private PutRollupJobAction() {
         super(NAME);
     }
 
-    @Override
-    public AcknowledgedResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return AcknowledgedResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java
index c6eecca5e3d80..0e80c9edf2d81 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java
@@ -21,11 +21,6 @@ private RollupSearchAction() {
         super(NAME);
     }
 
-    @Override
-    public SearchResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return SearchResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java
index ca2a5cd8d7264..dbe5a1ea59c1b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java
@@ -33,11 +33,6 @@ private StartRollupJobAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java
index 6b4a743ef2b3f..fb1dfc4458dde 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java
@@ -39,11 +39,6 @@ private StopRollupJobAction() {
         super(NAME);
     }
 
-    @Override
-    public Response newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return Response::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyAction.java
index 5d211ea70b522..1497962baf6e1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyAction.java
@@ -21,11 +21,6 @@ private CreateApiKeyAction() {
         super(NAME);
     }
 
-    @Override
-    public CreateApiKeyResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return CreateApiKeyResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GetApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GetApiKeyAction.java
index 2af331909a3af..d405c78e6d260 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GetApiKeyAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GetApiKeyAction.java
@@ -21,11 +21,6 @@ private GetApiKeyAction() {
         super(NAME);
     }
 
-    @Override
-    public GetApiKeyResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return GetApiKeyResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/InvalidateApiKeyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/InvalidateApiKeyAction.java
index 0f5c7e66e724c..de51379fca515 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/InvalidateApiKeyAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/InvalidateApiKeyAction.java
@@ -21,11 +21,6 @@ private InvalidateApiKeyAction() {
         super(NAME);
     }
 
-    @Override
-    public InvalidateApiKeyResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return InvalidateApiKeyResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java
index b27a71e202e55..90397d4146532 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java
@@ -20,11 +20,6 @@ private OpenIdConnectAuthenticateAction() {
         super(NAME);
     }
 
-    @Override
-    public OpenIdConnectAuthenticateResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return OpenIdConnectAuthenticateResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutAction.java
index 482484a7dedee..18d6f73ecb65a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutAction.java
@@ -17,11 +17,6 @@ private OpenIdConnectLogoutAction() {
         super(NAME);
     }
 
-    @Override
-    public OpenIdConnectLogoutResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return OpenIdConnectLogoutResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java
index 2aa82c7286cec..c1e0c3586a603 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java
@@ -17,11 +17,6 @@ private OpenIdConnectPrepareAuthenticationAction() {
         super(NAME);
     }
 
-    @Override
-    public OpenIdConnectPrepareAuthenticationResponse newResponse() {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
-    }
-
     @Override
     public Writeable.Reader getResponseReader() {
         return OpenIdConnectPrepareAuthenticationResponse::new;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java
index a36d2fdec74c4..303dc37b1b610 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java
@@ -5,12 +5,12 @@
  */
 package org.elasticsearch.xpack.core.security.action.privilege;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Action for deleting application privileges.
  */
-public final class DeletePrivilegesAction extends Action {
+public final class DeletePrivilegesAction extends StreamableResponseAction {
 
     public static final DeletePrivilegesAction INSTANCE = new DeletePrivilegesAction();
     public static final String NAME = "cluster:admin/xpack/security/privilege/delete";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java
index 0b8743228c523..b7a9af806d685 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java
@@ -5,12 +5,12 @@
  */
 package org.elasticsearch.xpack.core.security.action.privilege;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Action for retrieving one or more application privileges from the security index
  */
-public final class GetPrivilegesAction extends Action {
+public final class GetPrivilegesAction extends StreamableResponseAction {
 
     public static final GetPrivilegesAction INSTANCE = new GetPrivilegesAction();
     public static final String NAME = "cluster:admin/xpack/security/privilege/get";
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
index 3743bec144f29..21532514bc4b3 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
@@ -5,12 +5,12 @@
  */
 package org.elasticsearch.xpack.core.security.action.privilege;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.StreamableResponseAction;
 
 /**
  * Action for putting (adding/updating) one or more application privileges.
*/ -public final class PutPrivilegesAction extends Action { +public final class PutPrivilegesAction extends StreamableResponseAction { public static final PutPrivilegesAction INSTANCE = new PutPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/privilege/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java index 7c3cd58a7f467..481cb82b30403 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.security.action.realm; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ClearRealmCacheAction extends Action { +public class ClearRealmCacheAction extends StreamableResponseAction { public static final ClearRealmCacheAction INSTANCE = new ClearRealmCacheAction(); public static final String NAME = "cluster:admin/xpack/security/realm/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java index 096b5380181fb..b5ec964d64b76 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * The action for clearing the cache used by native roles that are stored in an index. 
*/ -public class ClearRolesCacheAction extends Action { +public class ClearRolesCacheAction extends StreamableResponseAction { public static final ClearRolesCacheAction INSTANCE = new ClearRolesCacheAction(); public static final String NAME = "cluster:admin/xpack/security/roles/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java index 6130f107fb726..c58a78b76578c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for deleting a role from the security index */ -public class DeleteRoleAction extends Action { +public class DeleteRoleAction extends StreamableResponseAction { public static final DeleteRoleAction INSTANCE = new DeleteRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java index 53126440afb9b..cc05e314b15b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action to retrieve a role from the security index */ -public class GetRolesAction extends Action { +public class GetRolesAction extends StreamableResponseAction { public static final GetRolesAction INSTANCE = new GetRolesAction(); public static final String NAME = "cluster:admin/xpack/security/role/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java index 8396625e262ff..692429bc0e853 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for adding a role to the security index */ -public class PutRoleAction extends Action { +public class PutRoleAction extends StreamableResponseAction { public static final PutRoleAction INSTANCE = new PutRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java index 6057daf959531..43fab6a5a6f52 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.core.security.action.rolemapping; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for deleting a role-mapping from the * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class DeleteRoleMappingAction extends Action { +public class DeleteRoleMappingAction extends StreamableResponseAction { public static final DeleteRoleMappingAction INSTANCE = new DeleteRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java index e1488bf70913e..39f410e42c181 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.core.security.action.rolemapping; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action to retrieve one or more role-mappings from X-Pack security * * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class GetRoleMappingsAction extends Action { +public class GetRoleMappingsAction extends StreamableResponseAction { public static final GetRoleMappingsAction INSTANCE = new GetRoleMappingsAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java index 9c3068adf127f..8464c1f817731 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.rolemapping; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for adding a role to the security index */ -public class PutRoleMappingAction extends Action { +public class PutRoleMappingAction extends StreamableResponseAction { public static final PutRoleMappingAction INSTANCE = new PutRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java index fca733a3938a7..7cf767fbf8290 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.saml; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for authenticating using SAML assertions */ -public final class SamlAuthenticateAction extends Action { +public final class SamlAuthenticateAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/saml/authenticate"; public static final SamlAuthenticateAction INSTANCE = new SamlAuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java index dc5aa09627564..4b937657c676d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.saml; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action to perform IdP-initiated logout for a SAML-SSO user */ -public final class SamlInvalidateSessionAction extends Action { +public final class SamlInvalidateSessionAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/saml/invalidate"; public static final SamlInvalidateSessionAction INSTANCE = new SamlInvalidateSessionAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java index 9ea3a29ca4ad9..994064950dbd6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.saml; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for initiating a logout process for a SAML-SSO user */ -public final class SamlLogoutAction extends Action { +public final class SamlLogoutAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/saml/logout"; public static final SamlLogoutAction INSTANCE = new SamlLogoutAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java index 12ad23ca50199..035530bf7528f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.saml; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for initiating an authentication 
process using SAML assertions */ -public final class SamlPrepareAuthenticationAction extends Action { +public final class SamlPrepareAuthenticationAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/saml/prepare"; public static final SamlPrepareAuthenticationAction INSTANCE = new SamlPrepareAuthenticationAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java index 7b913f594e582..2e5f25775f555 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for creating a new token */ -public final class CreateTokenAction extends Action { +public final class CreateTokenAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/token/create"; public static final CreateTokenAction INSTANCE = new CreateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java index 57bd5bd35dd0c..77538144b75f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for invalidating one or more tokens */ -public final class InvalidateTokenAction extends Action { +public final class InvalidateTokenAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/token/invalidate"; public static final InvalidateTokenAction INSTANCE = new InvalidateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java index 3478af2ec00f2..f57720228a91b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public final class RefreshTokenAction extends Action { +public final class RefreshTokenAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/token/refresh"; public static final RefreshTokenAction INSTANCE = new RefreshTokenAction(); @@ -20,4 +20,4 @@ private RefreshTokenAction() { public CreateTokenResponse newResponse() { return new CreateTokenResponse(); } -} +} \ No newline at end of file diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java index 18cfe85c8cb0c..b0b98023399c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class AuthenticateAction extends Action { +public class AuthenticateAction extends StreamableResponseAction { public static final String NAME = "cluster:admin/xpack/security/user/authenticate"; public static final AuthenticateAction INSTANCE = new AuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java index d01717a64eadc..afac98c6e83e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class ChangePasswordAction extends Action { +public class ChangePasswordAction extends StreamableResponseAction { public static final ChangePasswordAction INSTANCE = new ChangePasswordAction(); public static final String NAME = "cluster:admin/xpack/security/user/change_password"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java index 78666759dc0a7..ebd378ba1d4eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for deleting a native user. 
*/ -public class DeleteUserAction extends Action { +public class DeleteUserAction extends StreamableResponseAction { public static final DeleteUserAction INSTANCE = new DeleteUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java index 6d51d74d89962..027a5e2d1ff6b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action that lists the set of privileges held by a user. */ -public final class GetUserPrivilegesAction extends Action { +public final class GetUserPrivilegesAction extends StreamableResponseAction { public static final GetUserPrivilegesAction INSTANCE = new GetUserPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/user/list_privileges"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java index 49532049ba908..78a0750c0b958 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for retrieving a user from the security index */ -public class GetUsersAction extends Action { +public class GetUsersAction extends StreamableResponseAction { public static final GetUsersAction INSTANCE = new GetUsersAction(); public static final String NAME = "cluster:admin/xpack/security/user/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java index 30bb44a2c1c33..f97006a865276 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; /** * This action is testing whether a user has the specified * {@link RoleDescriptor.IndicesPrivileges privileges} */ -public class HasPrivilegesAction extends Action { +public class HasPrivilegesAction extends StreamableResponseAction { public static final HasPrivilegesAction INSTANCE = new HasPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/user/has_privileges"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java index 20bbde2366b5e..ad0ad300b50f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Action for putting (adding/updating) a native user. */ -public class PutUserAction extends Action { +public class PutUserAction extends StreamableResponseAction { public static final PutUserAction INSTANCE = new PutUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java index 0368cdf7d7dbc..dad4d5ce2f567 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.security.action.user; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * This action is for setting the enabled flag on a native or reserved user */ -public class SetEnabledAction extends Action { +public class SetEnabledAction extends StreamableResponseAction { public static final SetEnabledAction INSTANCE = new SetEnabledAction(); public static final String NAME = "cluster:admin/xpack/security/user/set_enabled"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index 4e1a84773db7d..64e4f8730fd5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -5,11 +5,11 @@ */ package org.elasticsearch.xpack.core.ssl.action; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,7 +25,7 @@ * Action to obtain information about X.509 (SSL/TLS) certificates that are being used by X-Pack. * The primary use case is for tracking the expiry dates of certificates. 
*/ -public class GetCertificateInfoAction extends Action { +public class GetCertificateInfoAction extends StreamableResponseAction { public static final GetCertificateInfoAction INSTANCE = new GetCertificateInfoAction(); public static final String NAME = "cluster:monitor/xpack/ssl/certificates/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java index 89279f4ea31cf..370b137e572a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.core.upgrade.actions; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; @@ -26,7 +26,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xpack.core.upgrade.IndexUpgradeServiceFields.UPGRADE_INDEX_OPTIONS; -public class IndexUpgradeAction extends Action { +public class IndexUpgradeAction extends StreamableResponseAction { public static final IndexUpgradeAction INSTANCE = new IndexUpgradeAction(); public static final String NAME = "cluster:admin/xpack/upgrade"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java index 3044c953a3e09..9ace42634d770 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.core.upgrade.actions; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; -public class IndexUpgradeInfoAction extends Action { +public class IndexUpgradeInfoAction extends StreamableResponseAction { public static final IndexUpgradeInfoAction INSTANCE = new IndexUpgradeInfoAction(); public static final String NAME = "cluster:admin/xpack/upgrade/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java index 04ec95a369af2..1133bb7731b9e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java @@ -5,12 +5,12 
@@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.ack; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * This action acks a watch in memory, and the index */ -public class AckWatchAction extends Action { +public class AckWatchAction extends StreamableResponseAction { public static final AckWatchAction INSTANCE = new AckWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/ack"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java index 936a21711547e..366aba32658a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.activate; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * This action acks a watch in memory, and the index */ -public class ActivateWatchAction extends Action { +public class ActivateWatchAction extends StreamableResponseAction { public static final ActivateWatchAction INSTANCE = new ActivateWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/activate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index eb440ddc2510e..5572572984c86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.delete; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; /** * This action deletes an watch from in memory, the scheduler and the index */ -public class DeleteWatchAction extends Action { +public class DeleteWatchAction extends StreamableResponseAction { public static final DeleteWatchAction INSTANCE = new DeleteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java index 924f170959426..e11b0aabe2f74 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.execute; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * This action executes a watch, either ignoring the schedule and condition or just 
the schedule and can execute a subset of the actions, * optionally persisting the history entry */ -public class ExecuteWatchAction extends Action { +public class ExecuteWatchAction extends StreamableResponseAction { public static final ExecuteWatchAction INSTANCE = new ExecuteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/execute"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java index 4df72a964b65f..99bf6c3277e3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java @@ -5,10 +5,12 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.get; +import org.elasticsearch.action.StreamableResponseAction; + /** * This action gets an watch by name */ -public class GetWatchAction extends org.elasticsearch.action.Action { +public class GetWatchAction extends StreamableResponseAction { public static final GetWatchAction INSTANCE = new GetWatchAction(); public static final String NAME = "cluster:monitor/xpack/watcher/watch/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index 509116b018e2b..d2771ca2b6ca0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.put; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; /** * This action puts an watch into the watch index and adds it to the scheduler */ -public class PutWatchAction extends Action { +public class PutWatchAction extends StreamableResponseAction { public static final PutWatchAction INSTANCE = new PutWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java index 4f85df827d705..61d7704a60936 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java @@ -19,11 +19,6 @@ private WatcherServiceAction() { super(NAME); } - @Override - public AcknowledgedResponse newResponse() { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - @Override public Writeable.Reader getResponseReader() { return AcknowledgedResponse::new; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java index 59fcff090f59e..065e95dcb12e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.core.watcher.transport.actions.stats; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * This Action gets the stats for the watcher plugin */ -public class WatcherStatsAction extends Action { +public class WatcherStatsAction extends StreamableResponseAction { public static final WatcherStatsAction INSTANCE = new WatcherStatsAction(); public static final String NAME = "cluster:monitor/xpack/watcher/stats/dist"; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index 9983dd76d63f2..b64bc42cb2c63 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -123,13 +123,12 @@ public MockClientBuilder addClusterStatusYellowResponse(String index) throws Int @SuppressWarnings({ "unchecked" }) public MockClientBuilder addIndicesDeleteResponse(String index, boolean exists, boolean exception, ActionListener actionListener) throws InterruptedException, ExecutionException, IOException { - AcknowledgedResponse response = DeleteIndexAction.INSTANCE.newResponse(); StreamInput si = mock(StreamInput.class); // this looks complicated but Mockito can't mock the final method // DeleteIndexResponse.isAcknowledged() and the only way to create // one with a true response is reading from a stream. 
when(si.readByte()).thenReturn((byte) 0x01); - response.readFrom(si); + AcknowledgedResponse response = DeleteIndexAction.INSTANCE.getResponseReader().read(si); doAnswer(invocation -> { DeleteIndexRequest deleteIndexRequest = (DeleteIndexRequest) invocation.getArguments()[0]; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java index ee824fc04e9f5..ccab2058374e4 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class SqlClearCursorAction extends Action { +public class SqlClearCursorAction extends StreamableResponseAction { public static final SqlClearCursorAction INSTANCE = new SqlClearCursorAction(); public static final String NAME = "indices:data/read/sql/close_cursor"; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java index f25eef31d3dc3..d40cdc55b9698 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; -public class SqlQueryAction extends Action { +public class SqlQueryAction extends StreamableResponseAction { public static final SqlQueryAction INSTANCE = new SqlQueryAction(); public static final String NAME = "indices:data/read/sql"; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java index 2431ecc1edf8c..15958516a5d75 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.sql.action; -import org.elasticsearch.action.Action; +import org.elasticsearch.action.StreamableResponseAction; /** * Sql action for translating SQL queries into ES requests */ -public class SqlTranslateAction extends Action { +public class SqlTranslateAction extends StreamableResponseAction { public static final SqlTranslateAction INSTANCE = new SqlTranslateAction(); public static final String NAME = "indices:data/read/sql/translate"; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java index 5f29c743325cc..135a1c6277cc7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.sql.plugin; -import org.elasticsearch.action.Action; 
+import org.elasticsearch.action.StreamableResponseAction; -public class SqlStatsAction extends Action { +public class SqlStatsAction extends StreamableResponseAction { public static final SqlStatsAction INSTANCE = new SqlStatsAction(); public static final String NAME = "cluster:monitor/xpack/sql/stats/dist"; From 956b6306e7ec2f71fcc9909d3ab3dec5d7d27b9c Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Sat, 29 Jun 2019 09:20:32 +0300 Subject: [PATCH 071/140] [ML] Rename df-analytics `_id_copy` to `ml__id_copy` (#43754) Renames `_id_copy` to `ml__id_copy` as field names starting with underscore are deprecated. The new field name `ml__id_copy` was chosen as an obscure enough field that users won't have in their data. Otherwise, this field is only intended to be used by df-analytics. --- .../integration/RunDataFrameAnalyticsIT.java | 6 +++++ .../dataframe/DataFrameAnalyticsFields.java | 20 --------------- .../ml/dataframe/DataFrameAnalyticsIndex.java | 25 +++++++++++++------ .../dataframe/DataFrameAnalyticsManager.java | 2 +- .../extractor/DataFrameDataExtractor.java | 4 +-- .../DataFrameAnalyticsIndexTests.java | 4 +-- .../DataFrameDataExtractorTests.java | 2 +- 7 files changed, 29 insertions(+), 34 deletions(-) delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 5696edcf6460c..c7295ce24db56 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -91,7 +91,10 @@ public void testOutlierDetectionWithFewDocuments() throws Exception { assertThat(destDoc.get(field), equalTo(sourceDoc.get(field))); } assertThat(destDoc.containsKey("ml"), is(true)); + + @SuppressWarnings("unchecked") Map resultsObject = (Map) destDoc.get("ml"); + assertThat(resultsObject.containsKey("outlier_score"), is(true)); double outlierScore = (double) resultsObject.get("outlier_score"); assertThat(outlierScore, allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0))); @@ -209,7 +212,10 @@ public void testOutlierDetectionWithMoreFieldsThanDocValueFieldLimit() throws Ex assertThat(destDoc.get(field), equalTo(sourceDoc.get(field))); } assertThat(destDoc.containsKey("ml"), is(true)); + + @SuppressWarnings("unchecked") Map resultsObject = (Map) destDoc.get("ml"); + assertThat(resultsObject.containsKey("outlier_score"), is(true)); double outlierScore = (double) resultsObject.get("outlier_score"); assertThat(outlierScore, allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0))); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java deleted file mode 100644 index 4ade30ae68b4e..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsFields.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.dataframe; - -public final class DataFrameAnalyticsFields { - - public static final String ID = "_id_copy"; - - // Metadata fields - static final String CREATION_DATE_MILLIS = "creation_date_in_millis"; - static final String VERSION = "version"; - static final String CREATED = "created"; - static final String CREATED_BY = "created_by"; - static final String ANALYTICS = "analytics"; - - private DataFrameAnalyticsFields() {} -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java index a682f259358f3..661525623575d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndex.java @@ -41,7 +41,16 @@ /** * {@link DataFrameAnalyticsIndex} class encapsulates logic for creating destination index based on source index metadata. */ -final class DataFrameAnalyticsIndex { +public final class DataFrameAnalyticsIndex { + + public static final String ID_COPY = "ml__id_copy"; + + // Metadata fields + static final String CREATION_DATE_MILLIS = "creation_date_in_millis"; + static final String VERSION = "version"; + static final String CREATED = "created"; + static final String CREATED_BY = "created_by"; + static final String ANALYTICS = "analytics"; private static final String PROPERTIES = "properties"; private static final String META = "_meta"; @@ -121,7 +130,7 @@ private static Settings settings(GetSettingsResponse settingsResponse) { Integer maxNumberOfReplicas = findMaxSettingValue(settingsResponse, IndexMetaData.SETTING_NUMBER_OF_REPLICAS); Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID); + settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), ID_COPY); settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC); if (maxNumberOfShards != null) { settingsBuilder.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, maxNumberOfShards); @@ -148,15 +157,15 @@ private static Integer findMaxSettingValue(GetSettingsResponse settingsResponse, private static void addProperties(Map mappingsAsMap) { Map properties = getOrPutDefault(mappingsAsMap, PROPERTIES, HashMap::new); - properties.put(DataFrameAnalyticsFields.ID, Map.of("type", "keyword")); + properties.put(ID_COPY, Map.of("type", "keyword")); } private static void addMetaData(Map mappingsAsMap, String analyticsId, Clock clock) { Map metadata = getOrPutDefault(mappingsAsMap, META, HashMap::new); - metadata.put(DataFrameAnalyticsFields.CREATION_DATE_MILLIS, clock.millis()); - metadata.put(DataFrameAnalyticsFields.CREATED_BY, "data-frame-analytics"); - metadata.put(DataFrameAnalyticsFields.VERSION, Map.of(DataFrameAnalyticsFields.CREATED, Version.CURRENT)); - metadata.put(DataFrameAnalyticsFields.ANALYTICS, analyticsId); + metadata.put(CREATION_DATE_MILLIS, clock.millis()); + metadata.put(CREATED_BY, "data-frame-analytics"); + metadata.put(VERSION, Map.of(CREATED, Version.CURRENT)); + metadata.put(ANALYTICS, analyticsId); } private static V getOrPutDefault(Map map, K key, Supplier valueSupplier) { @@ -176,7 +185,7 @@ public static void updateMappingsToDestIndex(Client 
client, DataFrameAnalyticsCo ImmutableOpenMap mappings = getIndexResponse.getMappings().get(getIndexResponse.indices()[0]); String type = mappings.keysIt().next(); - Map addedMappings = Map.of(PROPERTIES, Map.of(DataFrameAnalyticsFields.ID, Map.of("type", "keyword"))); + Map addedMappings = Map.of(PROPERTIES, Map.of(ID_COPY, Map.of("type", "keyword"))); PutMappingRequest putMappingRequest = new PutMappingRequest(getIndexResponse.indices()); putMappingRequest.type(type); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index c7cfe2b625369..9132e0f8192d7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -150,7 +150,7 @@ private void reindexDataframeAndStartAnalysis(DataFrameAnalyticsTask task, DataF reindexRequest.setSourceIndices(config.getSource().getIndex()); reindexRequest.setSourceQuery(config.getSource().getParsedQuery()); reindexRequest.setDestIndex(config.getDest().getIndex()); - reindexRequest.setScript(new Script("ctx._source." + DataFrameAnalyticsFields.ID + " = ctx._id")); + reindexRequest.setScript(new Script("ctx._source." + DataFrameAnalyticsIndex.ID_COPY + " = ctx._id")); final ThreadContext threadContext = client.threadPool().getThreadContext(); final Supplier supplier = threadContext.newRestorableContext(false); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 59cd78b4cc6fa..fa18f3bb25b3c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField; -import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsFields; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsIndex; import java.io.IOException; import java.util.ArrayList; @@ -126,7 +126,7 @@ private SearchRequestBuilder buildSearchRequest() { .setScroll(SCROLL_TIMEOUT) // This ensures the search throws if there are failures and the scroll context gets cleared automatically .setAllowPartialSearchResults(false) - .addSort(DataFrameAnalyticsFields.ID, SortOrder.ASC) + .addSort(DataFrameAnalyticsIndex.ID_COPY, SortOrder.ASC) .setIndices(context.indices) .setSize(context.scrollSize) .setQuery(context.query); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java index bd6bdb3552cce..b81df21e4c63a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsIndexTests.java @@ -159,12 +159,12 @@ public void testCreateDestinationIndex() throws IOException { containsInAnyOrder("index.number_of_shards", "index.number_of_replicas", 
"index.sort.field", "index.sort.order")); assertThat(createIndexRequest.settings().getAsInt("index.number_of_shards", -1), equalTo(5)); assertThat(createIndexRequest.settings().getAsInt("index.number_of_replicas", -1), equalTo(1)); - assertThat(createIndexRequest.settings().get("index.sort.field"), equalTo("_id_copy")); + assertThat(createIndexRequest.settings().get("index.sort.field"), equalTo("ml__id_copy")); assertThat(createIndexRequest.settings().get("index.sort.order"), equalTo("asc")); try (XContentParser parser = createParser(JsonXContent.jsonXContent, createIndexRequest.mappings().get("_doc"))) { Map map = parser.map(); - assertThat(extractValue("_doc.properties._id_copy.type", map), equalTo("keyword")); + assertThat(extractValue("_doc.properties.ml__id_copy.type", map), equalTo("keyword")); assertThat(extractValue("_doc.properties.field_1", map), equalTo("field_1_mappings")); assertThat(extractValue("_doc.properties.field_2", map), equalTo("field_2_mappings")); assertThat(extractValue("_doc._meta.analytics", map), equalTo(ANALYTICS_ID)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 6b0e88d759b81..47c5aa26390a5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -127,7 +127,7 @@ public void testTwoPageExtraction() throws IOException { assertThat(searchRequest, containsString("\"query\":{\"match_all\":{\"boost\":1.0}}")); assertThat(searchRequest, containsString("\"docvalue_fields\":[{\"field\":\"field_1\"},{\"field\":\"field_2\"}]")); assertThat(searchRequest, containsString("\"_source\":{\"includes\":[],\"excludes\":[]}")); - assertThat(searchRequest, containsString("\"sort\":[{\"_id_copy\":{\"order\":\"asc\"}}]")); + assertThat(searchRequest, containsString("\"sort\":[{\"ml__id_copy\":{\"order\":\"asc\"}}]")); // Check continue scroll requests had correct ids assertThat(dataExtractor.capturedContinueScrollIds.size(), equalTo(2)); From c108b093b5a7da9b042d25969fd7339b86adc19f Mon Sep 17 00:00:00 2001 From: David Roberts Date: Sat, 29 Jun 2019 07:51:29 +0100 Subject: [PATCH 072/140] [ML] Assert that a no-op job creates no results nor state (#43681) If a job is opened and then closed and does nothing in between then it should not persist any results or state documents. This change adapts the no-op job test to assert no results in addition to no state, and to log any documents that cause this assertion to fail. 
Relates elastic/ml-cpp#512 Relates #43680 --- .../xpack/ml/integration/PersistJobIT.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index a68fa2fe02a8d..b9666306911c7 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -23,6 +23,7 @@ import java.util.List; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -178,13 +179,12 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndNoTimeAdvance() throw closeJob(jobId); // Check that state has not been persisted - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) - .setTrackTotalHits(true) - .setSize(10000) - .get(); + SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).get(); + assertThat(Arrays.asList(stateDocsResponse.getHits().getHits()), empty()); - assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(0L)); + // Check that results have not been persisted + SearchResponse resultsDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get(); + assertThat(Arrays.asList(resultsDocsResponse.getHits().getHits()), empty()); deleteJob(jobId); } From cdfc98680f0e62aec2b558c883e092ee48140601 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 29 Jun 2019 15:17:00 +0300 Subject: [PATCH 073/140] Consistent Secure Settings (#40416) Introduces a new `ConsistentSettingsService` that exposes a single public method, `allSecureSettingsConsistent`. The method returns `true` if the local node's secure settings (inside the keystore) are equal to the master's, and `false` otherwise. Specifically, the local node must have exactly the same secure settings as the master - no setting name may be missing or in surplus - for all `SecureSetting` instances flagged with the newly introduced `Property.Consistent`. It is worth highlighting that `allSecureSettingsConsistent` reports not a consensus view across the cluster, but rather the local node's perspective in relation to the master.
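In essence, the check reduces to comparing per-setting SHA-256 digests of the local keystore's values against the hashes the master publishes. The sketch below is a toy model of that comparison under stated assumptions, not this PR's actual implementation: the digests sit in plain maps with hypothetical names (`masterHashes`, `localValues`), whereas the real code obtains digests via `KeyStoreWrapper.getSHA256Digest` (exercised in the `KeyStoreWrapperTests` change below) and carries the master's copies in cluster metadata (`hashesOfConsistentSettings`), possibly after further processing.

[source,java]
----
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Map;

public class ConsistentSettingsSketch {

    // SHA-256 of a secure string value, mirroring the expectation asserted in
    // KeyStoreWrapperTests.testValueSHA256Digest() below.
    static byte[] sha256(String secureValue) throws NoSuchAlgorithmException {
        return MessageDigest.getInstance("SHA-256").digest(secureValue.getBytes(StandardCharsets.UTF_8));
    }

    // True only if the local values hash to exactly the published hashes:
    // no setting name may be missing locally and none may be in surplus.
    static boolean allSecureSettingsConsistent(Map<String, byte[]> masterHashes,
                                               Map<String, String> localValues) throws NoSuchAlgorithmException {
        if (masterHashes.keySet().equals(localValues.keySet()) == false) {
            return false; // missing or surplus setting names on this node
        }
        for (Map.Entry<String, String> entry : localValues.entrySet()) {
            if (Arrays.equals(masterHashes.get(entry.getKey()), sha256(entry.getValue())) == false) {
                return false; // same setting name, different secret value
            }
        }
        return true;
    }
}
----

The two-step shape of the check - set equality of the setting names first, then value-by-value digest comparison - is what rules out both missing and surplus settings, as described above.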
---
 build.gradle                                  |   4 +-
 .../common/settings/KeyStoreWrapperTests.java |  23 ++
 .../cluster/metadata/DiffableStringMap.java   |   4 +
 .../cluster/metadata/MetaData.java            |  51 +++-
 .../cluster/service/ClusterService.java       |   2 +-
 .../common/hash/MessageDigests.java           |  19 +-
 .../common/settings/ClusterSettings.java      |   4 +-
 .../settings/ConsistentSettingsService.java   | 256 ++++++++++++++++++
 .../common/settings/KeyStoreWrapper.java      |  61 +++--
 .../common/settings/SecureSetting.java        |  19 +-
 .../common/settings/SecureSettings.java       |   2 +
 .../common/settings/Setting.java              |  20 ++
 .../common/settings/Settings.java             |   9 +-
 .../common/settings/SettingsModule.java       |  22 +-
 .../java/org/elasticsearch/node/Node.java     |   4 +
 .../common/settings/ConsistentSettingsIT.java | 188 +++++++++++++
 .../ConsistentSettingsServiceTests.java       | 159 +++++++++++
 .../common/settings/SettingsModuleTests.java  |  36 +++
 .../common/settings/MockSecureSettings.java   |  13 +
 .../notification/NotificationService.java     |  17 +-
 .../NotificationServiceTests.java             |   7 +
 21 files changed, 879 insertions(+), 41 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java
 create mode 100644 server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsIT.java
 create mode 100644 server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java

diff --git a/build.gradle b/build.gradle
index 5c1fe80668283..8ee0712383367 100644
--- a/build.gradle
+++ b/build.gradle
@@ -160,8 +160,8 @@ task verifyVersions {
  * after the backport of the backcompat code is complete.
  */
 
-boolean bwc_tests_enabled = true
-final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = false
+final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/40416" /* place a PR link here when committing bwc changes */
 if (bwc_tests_enabled == false) {
   if (bwc_tests_disabled_issue.isEmpty()) {
     throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")

diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
index f68a731edf8f7..568ddfe97df16 100644
--- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
+++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
@@ -51,10 +51,12 @@
 import java.nio.file.Path;
 import java.security.GeneralSecurityException;
 import java.security.KeyStore;
+import java.security.MessageDigest;
 import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import static org.hamcrest.Matchers.containsString;
@@ -126,6 +128,27 @@ public void testCannotReadStringFromClosedKeystore() throws Exception {
         assertThat(exception.getMessage(), containsString("closed"));
     }
 
+    public void testValueSHA256Digest() throws Exception {
+        final KeyStoreWrapper keystore = KeyStoreWrapper.create();
+        final String stringSettingKeyName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT) + "1";
+        final String stringSettingValue = randomAlphaOfLength(32);
+        keystore.setString(stringSettingKeyName, stringSettingValue.toCharArray());
+        final String fileSettingKeyName = 
randomAlphaOfLength(5).toLowerCase(Locale.ROOT) + "2"; + final byte[] fileSettingValue = randomByteArrayOfLength(32); + keystore.setFile(fileSettingKeyName, fileSettingValue); + + final byte[] stringSettingHash = MessageDigest.getInstance("SHA-256").digest(stringSettingValue.getBytes(StandardCharsets.UTF_8)); + assertThat(keystore.getSHA256Digest(stringSettingKeyName), equalTo(stringSettingHash)); + final byte[] fileSettingHash = MessageDigest.getInstance("SHA-256").digest(fileSettingValue); + assertThat(keystore.getSHA256Digest(fileSettingKeyName), equalTo(fileSettingHash)); + + keystore.close(); + + // value hashes accessible even when the keystore is closed + assertThat(keystore.getSHA256Digest(stringSettingKeyName), equalTo(stringSettingHash)); + assertThat(keystore.getSHA256Digest(fileSettingKeyName), equalTo(fileSettingHash)); + } + public void testUpgradeNoop() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DiffableStringMap.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DiffableStringMap.java index 46433eed8a657..b6e31e92698e6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DiffableStringMap.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DiffableStringMap.java @@ -39,6 +39,8 @@ */ public class DiffableStringMap extends AbstractMap implements Diffable { + public static final DiffableStringMap EMPTY = new DiffableStringMap(Collections.emptyMap()); + private final Map innerMap; DiffableStringMap(final Map map) { @@ -75,6 +77,8 @@ public static Diff readDiffFrom(StreamInput in) throws IOExce */ public static class DiffableStringMapDiff implements Diff { + public static final DiffableStringMapDiff EMPTY = new DiffableStringMapDiff(DiffableStringMap.EMPTY, DiffableStringMap.EMPTY); + private final List deletes; private final Map upserts; // diffs also become upserts diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index c0c2bd82295ec..4c2eeec72a970 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -25,6 +25,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.Version; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.FeatureAware; @@ -168,6 +169,7 @@ public interface Custom extends NamedDiffable, ToXContentFragment, Clust private final Settings transientSettings; private final Settings persistentSettings; private final Settings settings; + private final DiffableStringMap hashesOfConsistentSettings; private final ImmutableOpenMap indices; private final ImmutableOpenMap templates; private final ImmutableOpenMap customs; @@ -182,7 +184,7 @@ public interface Custom extends NamedDiffable, ToXContentFragment, Clust private final SortedMap aliasAndIndexLookup; MetaData(String clusterUUID, boolean clusterUUIDCommitted, long version, CoordinationMetaData coordinationMetaData, - Settings transientSettings, Settings persistentSettings, + Settings transientSettings, Settings persistentSettings, DiffableStringMap hashesOfConsistentSettings, ImmutableOpenMap indices, 
ImmutableOpenMap templates, ImmutableOpenMap customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, SortedMap aliasAndIndexLookup) { @@ -193,6 +195,7 @@ public interface Custom extends NamedDiffable, ToXContentFragment, Clust this.transientSettings = transientSettings; this.persistentSettings = persistentSettings; this.settings = Settings.builder().put(persistentSettings).put(transientSettings).build(); + this.hashesOfConsistentSettings = hashesOfConsistentSettings; this.indices = indices; this.customs = customs; this.templates = templates; @@ -244,6 +247,10 @@ public Settings persistentSettings() { return this.persistentSettings; } + public Map hashesOfConsistentSettings() { + return this.hashesOfConsistentSettings; + } + public CoordinationMetaData coordinationMetaData() { return this.coordinationMetaData; } @@ -733,6 +740,9 @@ public static boolean isGlobalStateEquals(MetaData metaData1, MetaData metaData2 if (!metaData1.persistentSettings.equals(metaData2.persistentSettings)) { return false; } + if (!metaData1.hashesOfConsistentSettings.equals(metaData2.hashesOfConsistentSettings)) { + return false; + } if (!metaData1.templates.equals(metaData2.templates())) { return false; } @@ -787,6 +797,7 @@ private static class MetaDataDiff implements Diff { private CoordinationMetaData coordinationMetaData; private Settings transientSettings; private Settings persistentSettings; + private Diff hashesOfConsistentSettings; private Diff> indices; private Diff> templates; private Diff> customs; @@ -798,6 +809,7 @@ private static class MetaDataDiff implements Diff { coordinationMetaData = after.coordinationMetaData; transientSettings = after.transientSettings; persistentSettings = after.persistentSettings; + hashesOfConsistentSettings = after.hashesOfConsistentSettings.diff(before.hashesOfConsistentSettings); indices = DiffableUtils.diff(before.indices, after.indices, DiffableUtils.getStringKeySerializer()); templates = DiffableUtils.diff(before.templates, after.templates, DiffableUtils.getStringKeySerializer()); customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER); @@ -810,6 +822,11 @@ private static class MetaDataDiff implements Diff { coordinationMetaData = new CoordinationMetaData(in); transientSettings = Settings.readSettingsFromStream(in); persistentSettings = Settings.readSettingsFromStream(in); + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { + hashesOfConsistentSettings = DiffableStringMap.readDiffFrom(in); + } else { + hashesOfConsistentSettings = DiffableStringMap.DiffableStringMapDiff.EMPTY; + } indices = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexMetaData::readFrom, IndexMetaData::readDiffFrom); templates = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexTemplateMetaData::readFrom, @@ -825,6 +842,9 @@ public void writeTo(StreamOutput out) throws IOException { coordinationMetaData.writeTo(out); Settings.writeSettingsToStream(transientSettings, out); Settings.writeSettingsToStream(persistentSettings, out); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + hashesOfConsistentSettings.writeTo(out); + } indices.writeTo(out); templates.writeTo(out); customs.writeTo(out); @@ -839,6 +859,7 @@ public MetaData apply(MetaData part) { builder.coordinationMetaData(coordinationMetaData); builder.transientSettings(transientSettings); builder.persistentSettings(persistentSettings); + 
builder.hashesOfConsistentSettings(hashesOfConsistentSettings.apply(part.hashesOfConsistentSettings)); builder.indices(indices.apply(part.indices)); builder.templates(templates.apply(part.templates)); builder.customs(customs.apply(part.customs)); @@ -854,6 +875,9 @@ public static MetaData readFrom(StreamInput in) throws IOException { builder.coordinationMetaData(new CoordinationMetaData(in)); builder.transientSettings(readSettingsFromStream(in)); builder.persistentSettings(readSettingsFromStream(in)); + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { + builder.hashesOfConsistentSettings(new DiffableStringMap(in)); + } int size = in.readVInt(); for (int i = 0; i < size; i++) { builder.put(IndexMetaData.readFrom(in), false); @@ -878,6 +902,9 @@ public void writeTo(StreamOutput out) throws IOException { coordinationMetaData.writeTo(out); writeSettingsToStream(transientSettings, out); writeSettingsToStream(persistentSettings, out); + if (out.getVersion().onOrAfter(Version.V_7_3_0)) { + hashesOfConsistentSettings.writeTo(out); + } out.writeVInt(indices.size()); for (IndexMetaData indexMetaData : this) { indexMetaData.writeTo(out); @@ -918,6 +945,7 @@ public static class Builder { private CoordinationMetaData coordinationMetaData = CoordinationMetaData.EMPTY_META_DATA; private Settings transientSettings = Settings.Builder.EMPTY_SETTINGS; private Settings persistentSettings = Settings.Builder.EMPTY_SETTINGS; + private DiffableStringMap hashesOfConsistentSettings = new DiffableStringMap(Collections.emptyMap()); private final ImmutableOpenMap.Builder indices; private final ImmutableOpenMap.Builder templates; @@ -937,6 +965,7 @@ public Builder(MetaData metaData) { this.coordinationMetaData = metaData.coordinationMetaData; this.transientSettings = metaData.transientSettings; this.persistentSettings = metaData.persistentSettings; + this.hashesOfConsistentSettings = metaData.hashesOfConsistentSettings; this.version = metaData.version; this.indices = ImmutableOpenMap.builder(metaData.indices); this.templates = ImmutableOpenMap.builder(metaData.templates); @@ -1100,6 +1129,20 @@ public Builder persistentSettings(Settings settings) { return this; } + public DiffableStringMap hashesOfConsistentSettings() { + return this.hashesOfConsistentSettings; + } + + public Builder hashesOfConsistentSettings(DiffableStringMap hashesOfConsistentSettings) { + this.hashesOfConsistentSettings = hashesOfConsistentSettings; + return this; + } + + public Builder hashesOfConsistentSettings(Map hashesOfConsistentSettings) { + this.hashesOfConsistentSettings = new DiffableStringMap(hashesOfConsistentSettings); + return this; + } + public Builder version(long version) { this.version = version; return this; @@ -1173,8 +1216,8 @@ public MetaData build() { String[] allClosedIndicesArray = allClosedIndices.toArray(new String[allClosedIndices.size()]); return new MetaData(clusterUUID, clusterUUIDCommitted, version, coordinationMetaData, transientSettings, persistentSettings, - indices.build(), templates.build(), customs.build(), allIndicesArray, allOpenIndicesArray, allClosedIndicesArray, - aliasAndIndexLookup); + hashesOfConsistentSettings, indices.build(), templates.build(), customs.build(), allIndicesArray, allOpenIndicesArray, + allClosedIndicesArray, aliasAndIndexLookup); } private SortedMap buildAliasAndIndexLookup() { @@ -1298,6 +1341,8 @@ public static MetaData fromXContent(XContentParser parser) throws IOException { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { 
builder.put(IndexMetaData.Builder.fromXContent(parser), false); } + } else if ("hashes_of_consistent_settings".equals(currentFieldName)) { + builder.hashesOfConsistentSettings(parser.mapStrings()); } else if ("templates".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { builder.put(IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName())); diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index f83f2606b14b6..fded43a4bdd19 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -73,7 +73,7 @@ public ClusterService(Settings settings, ClusterSettings clusterSettings, Thread } public ClusterService(Settings settings, ClusterSettings clusterSettings, MasterService masterService, - ClusterApplierService clusterApplierService) { + ClusterApplierService clusterApplierService) { this.settings = settings; this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.masterService = masterService; diff --git a/server/src/main/java/org/elasticsearch/common/hash/MessageDigests.java b/server/src/main/java/org/elasticsearch/common/hash/MessageDigests.java index 8bcef7b8ff4cb..df8f3e2fa7f43 100644 --- a/server/src/main/java/org/elasticsearch/common/hash/MessageDigests.java +++ b/server/src/main/java/org/elasticsearch/common/hash/MessageDigests.java @@ -95,15 +95,24 @@ private static MessageDigest get(ThreadLocal messageDigest) { * @return a hex representation of the input as a String. */ public static String toHexString(byte[] bytes) { - Objects.requireNonNull(bytes); - StringBuilder sb = new StringBuilder(2 * bytes.length); + return new String(toHexCharArray(bytes)); + } + /** + * Encodes the byte array into a newly created hex char array, without allocating any other temporary variables. + * + * @param bytes the input to be encoded as hex. + * @return the hex encoding of the input as a char array. + */ + public static char[] toHexCharArray(byte[] bytes) { + Objects.requireNonNull(bytes); + final char[] result = new char[2 * bytes.length]; for (int i = 0; i < bytes.length; i++) { byte b = bytes[i]; - sb.append(HEX_DIGITS[b >> 4 & 0xf]).append(HEX_DIGITS[b & 0xf]); + result[2 * i] = HEX_DIGITS[b >> 4 & 0xf]; + result[2 * i + 1] = HEX_DIGITS[b & 0xf]; } - - return sb.toString(); + return result; } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index b1bb84cff4d6f..867b628a5f97c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -112,12 +112,12 @@ * Encapsulates all valid cluster level settings. 
*/ public final class ClusterSettings extends AbstractScopedSettings { + public ClusterSettings(final Settings nodeSettings, final Set> settingsSet) { this(nodeSettings, settingsSet, Collections.emptySet()); } - public ClusterSettings( - final Settings nodeSettings, final Set> settingsSet, final Set> settingUpgraders) { + public ClusterSettings(final Settings nodeSettings, final Set> settingsSet, final Set> settingUpgraders) { super(nodeSettings, settingsSet, settingUpgraders, Property.NodeScope); addSettingsUpdater(new LoggingSettingUpdater(nodeSettings)); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java new file mode 100644 index 0000000000000..411a470238638 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java @@ -0,0 +1,256 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.threadpool.ThreadPool; + +import java.nio.charset.StandardCharsets; +import java.security.NoSuchAlgorithmException; +import java.security.spec.InvalidKeySpecException; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import javax.crypto.SecretKey; +import javax.crypto.SecretKeyFactory; +import javax.crypto.spec.PBEKeySpec; + +/** + * Used to publish secure setting hashes in the cluster state and to validate those hashes against the local values of those same settings. + * This is colloquially referred to as the secure setting consistency check. It will publish and verify hashes only for the collection + * of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property. 
+ */ +public final class ConsistentSettingsService { + private static final Logger logger = LogManager.getLogger(ConsistentSettingsService.class); + + private final Settings settings; + private final ClusterService clusterService; + private final Collection> secureSettingsCollection; + private final SecretKeyFactory pbkdf2KeyFactory; + + public ConsistentSettingsService(Settings settings, ClusterService clusterService, + Collection> secureSettingsCollection) { + this.settings = settings; + this.clusterService = clusterService; + this.secureSettingsCollection = secureSettingsCollection; + // this is used to compute the PBKDF2 hash (the published one) + try { + this.pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512"); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("The \"PBKDF2WithHmacSHA512\" algorithm is required for consistent secure settings' hashes", e); + } + } + + /** + * Returns a {@link LocalNodeMasterListener} that will publish hashes of all the settings passed in the constructor. These hashes are + * published by the master node only. Note that this is not designed for {@link SecureSettings} implementations that are mutable. + */ + public LocalNodeMasterListener newHashPublisher() { + // eagerly compute hashes to be published + final Map computedHashesOfConsistentSettings = computeHashesOfConsistentSecureSettings(); + return new HashesPublisher(computedHashesOfConsistentSettings, clusterService); + } + + /** + * Verifies that the hashes of consistent secure settings in the latest {@code ClusterState} verify for the values of those same + * settings on the local node. The settings to be checked are passed in the constructor. Also, validates that a missing local + * value is also missing in the published set, and vice-versa. + */ + public boolean areAllConsistent() { + final ClusterState state = clusterService.state(); + final Map publishedHashesOfConsistentSettings = state.metaData().hashesOfConsistentSettings(); + final Set publishedSettingKeysToVerify = new HashSet<>(); + publishedSettingKeysToVerify.addAll(publishedHashesOfConsistentSettings.keySet()); + final AtomicBoolean allConsistent = new AtomicBoolean(true); + forEachConcreteSecureSettingDo(concreteSecureSetting -> { + final String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(concreteSecureSetting.getKey()); + final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); + if (publishedSaltAndHash == null && localHash == null) { + // consistency of missing + logger.debug("no published hash for the consistent secure setting [{}] but it also does NOT exist on the local node", + concreteSecureSetting.getKey()); + } else if (publishedSaltAndHash == null && localHash != null) { + // setting missing on master but present locally + logger.warn("no published hash for the consistent secure setting [{}] but it exists on the local node", + concreteSecureSetting.getKey()); + if (state.nodes().isLocalNodeElectedMaster()) { + throw new IllegalStateException("Master node cannot validate consistent setting. 
No published hash for [" + + concreteSecureSetting.getKey() + "] but setting exists."); + } + allConsistent.set(false); + } else if (publishedSaltAndHash != null && localHash == null) { + // setting missing locally but present on master + logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", + concreteSecureSetting.getKey()); + allConsistent.set(false); + } else { + assert publishedSaltAndHash != null; + assert localHash != null; + final String[] parts = publishedSaltAndHash.split(":"); + if (parts == null || parts.length != 2) { + throw new IllegalArgumentException("published hash [" + publishedSaltAndHash + " ] for secure setting [" + + concreteSecureSetting.getKey() + "] is invalid"); + } + final String publishedSalt = parts[0]; + final String publishedHash = parts[1]; + final byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localHash, publishedSalt.getBytes(StandardCharsets.UTF_8)); + final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8); + if (false == publishedHash.equals(computedSaltedHash)) { + logger.warn("the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]", + publishedHash, concreteSecureSetting.getKey(), computedSaltedHash); + if (state.nodes().isLocalNodeElectedMaster()) { + throw new IllegalStateException("Master node cannot validate consistent setting. The published hash [" + + publishedHash + "] of the consistent secure setting [" + concreteSecureSetting.getKey() + + "] differs from the locally computed one [" + computedSaltedHash + "]."); + } + allConsistent.set(false); + } + } + publishedSettingKeysToVerify.remove(concreteSecureSetting.getKey()); + }); + // another case of settings missing locally, when group settings have not expanded to all the keys published + for (String publishedSettingKey : publishedSettingKeysToVerify) { + for (Setting setting : secureSettingsCollection) { + if (setting.match(publishedSettingKey)) { + // setting missing locally but present on master + logger.warn("the consistent secure setting [{}] does not exist on the local node but there is a published hash for it", + publishedSettingKey); + allConsistent.set(false); + } + } + } + return allConsistent.get(); + } + + /** + * Iterate over the passed in secure settings, expanding {@link Setting.AffixSetting} to concrete settings, in the scope of the local + * settings. 
+ */ + private void forEachConcreteSecureSettingDo(Consumer> secureSettingConsumer) { + for (Setting setting : secureSettingsCollection) { + assert setting.isConsistent() : "[" + setting.getKey() + "] is not a consistent setting"; + if (setting instanceof Setting.AffixSetting) { + ((Setting.AffixSetting)setting).getAllConcreteSettings(settings).forEach(concreteSetting -> { + assert concreteSetting instanceof SecureSetting : "[" + concreteSetting.getKey() + "] is not a secure setting"; + secureSettingConsumer.accept((SecureSetting)concreteSetting); + }); + } else if (setting instanceof SecureSetting) { + secureSettingConsumer.accept((SecureSetting) setting); + } else { + assert false : "Unrecognized consistent secure setting [" + setting.getKey() + "]"; + } + } + } + + private Map computeHashesOfConsistentSecureSettings() { + final Map hashesBySettingKey = new HashMap<>(); + forEachConcreteSecureSettingDo(concreteSecureSetting -> { + final byte[] localHash = concreteSecureSetting.getSecretDigest(settings); + if (localHash != null) { + final String salt = UUIDs.randomBase64UUID(); + final byte[] publicHash = computeSaltedPBKDF2Hash(localHash, salt.getBytes(StandardCharsets.UTF_8)); + final String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8); + hashesBySettingKey.put(concreteSecureSetting.getKey(), salt + ":" + encodedPublicHash); + } + }); + return hashesBySettingKey; + } + + private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) { + final int iterations = 5000; + final int keyLength = 512; + char[] value = null; + try { + value = MessageDigests.toHexCharArray(bytes); + final PBEKeySpec spec = new PBEKeySpec(value, salt, iterations, keyLength); + final SecretKey key = pbkdf2KeyFactory.generateSecret(spec); + return key.getEncoded(); + } catch (InvalidKeySpecException e) { + throw new RuntimeException("Unexpected exception when computing PBKDF2 hash", e); + } finally { + if (value != null) { + Arrays.fill(value, '0'); + } + } + } + + static final class HashesPublisher implements LocalNodeMasterListener { + + // eagerly compute hashes to be published + final Map computedHashesOfConsistentSettings; + final ClusterService clusterService; + + HashesPublisher(Map computedHashesOfConsistentSettings, ClusterService clusterService) { + this.computedHashesOfConsistentSettings = Map.copyOf(computedHashesOfConsistentSettings); + this.clusterService = clusterService; + } + + @Override + public void onMaster() { + clusterService.submitStateUpdateTask("publish-secure-settings-hashes", new ClusterStateUpdateTask(Priority.URGENT) { + @Override + public ClusterState execute(ClusterState currentState) { + final Map publishedHashesOfConsistentSettings = currentState.metaData() + .hashesOfConsistentSettings(); + if (computedHashesOfConsistentSettings.equals(publishedHashesOfConsistentSettings)) { + logger.debug("Nothing to publish. 
What is already published matches this node's view."); + return currentState; + } else { + return ClusterState.builder(currentState).metaData(MetaData.builder(currentState.metaData()) + .hashesOfConsistentSettings(computedHashesOfConsistentSettings)).build(); + } + } + + @Override + public void onFailure(String source, Exception e) { + logger.error("unable to publish secure settings hashes", e); + } + + }); + } + + @Override + public void offMaster() { + logger.trace("I am no longer master, nothing to do"); + } + + @Override + public String executorName() { + return ThreadPool.Names.SAME; + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 64cdd7165f2a3..7ad69c1eebe0c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -30,6 +30,7 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.hash.MessageDigests; import javax.crypto.Cipher; import javax.crypto.CipherInputStream; @@ -85,6 +86,17 @@ private enum EntryType { FILE } + /** An entry in the keystore. The bytes are opaque and interpreted based on the entry type. */ + private static class Entry { + final byte[] bytes; + final byte[] sha256Digest; + + Entry(byte[] bytes) { + this.bytes = bytes; + this.sha256Digest = MessageDigests.sha256().digest(bytes); + } + } + /** * A regex for the valid characters that a setting name in the keystore may use. */ @@ -148,7 +160,7 @@ private enum EntryType { private final byte[] dataBytes; /** The decrypted secret data. See {@link #decrypt(char[])}. 
*/ - private final SetOnce> entries = new SetOnce<>(); + private final SetOnce> entries = new SetOnce<>(); private volatile boolean closed; private KeyStoreWrapper(int formatVersion, boolean hasPassword, byte[] dataBytes) { @@ -350,7 +362,7 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio int entrySize = input.readInt(); byte[] entryBytes = new byte[entrySize]; input.readFully(entryBytes); - entries.get().put(setting, entryBytes); + entries.get().put(setting, new Entry(entryBytes)); } if (input.read() != -1) { throw new SecurityException("Keystore has been corrupted or tampered with"); @@ -369,11 +381,11 @@ private byte[] encrypt(char[] password, byte[] salt, byte[] iv) throws GeneralSe try (CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher); DataOutputStream output = new DataOutputStream(cipherStream)) { output.writeInt(entries.get().size()); - for (Map.Entry mapEntry : entries.get().entrySet()) { + for (Map.Entry mapEntry : entries.get().entrySet()) { output.writeUTF(mapEntry.getKey()); - byte[] entry = mapEntry.getValue(); - output.writeInt(entry.length); - output.write(entry); + byte[] entryBytes = mapEntry.getValue().bytes; + output.writeInt(entryBytes.length); + output.write(entryBytes); } } return bytes.toByteArray(); @@ -448,7 +460,7 @@ private void decryptLegacyEntries() throws GeneralSecurityException, IOException } Arrays.fill(chars, '\0'); - entries.get().put(setting, bytes); + entries.get().put(setting, new Entry(bytes)); } } @@ -521,8 +533,8 @@ public Set getSettingNames() { @Override public synchronized SecureString getString(String setting) { ensureOpen(); - byte[] entry = entries.get().get(setting); - ByteBuffer byteBuffer = ByteBuffer.wrap(entry); + Entry entry = entries.get().get(setting); + ByteBuffer byteBuffer = ByteBuffer.wrap(entry.bytes); CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); return new SecureString(Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit())); } @@ -530,8 +542,19 @@ public synchronized SecureString getString(String setting) { @Override public synchronized InputStream getFile(String setting) { ensureOpen(); - byte[] entry = entries.get().get(setting); - return new ByteArrayInputStream(entry); + Entry entry = entries.get().get(setting); + return new ByteArrayInputStream(entry.bytes); + } + + /** + * Returns the SHA256 digest for the setting's value, even after {@code #close()} has been called. The setting must exist. The digest is + * used to check for value changes without actually storing the value. 
+ */ + @Override + public byte[] getSHA256Digest(String setting) { + assert entries.get() != null : "Keystore is not loaded"; + Entry entry = entries.get().get(setting); + return entry.sha256Digest; } /** @@ -553,9 +576,9 @@ synchronized void setString(String setting, char[] value) { ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(value)); byte[] bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); - byte[] oldEntry = entries.get().put(setting, bytes); + Entry oldEntry = entries.get().put(setting, new Entry(bytes)); if (oldEntry != null) { - Arrays.fill(oldEntry, (byte)0); + Arrays.fill(oldEntry.bytes, (byte)0); } } @@ -564,18 +587,18 @@ synchronized void setFile(String setting, byte[] bytes) { ensureOpen(); validateSettingName(setting); - byte[] oldEntry = entries.get().put(setting, Arrays.copyOf(bytes, bytes.length)); + Entry oldEntry = entries.get().put(setting, new Entry(Arrays.copyOf(bytes, bytes.length))); if (oldEntry != null) { - Arrays.fill(oldEntry, (byte)0); + Arrays.fill(oldEntry.bytes, (byte)0); } } /** Remove the given setting from the keystore. */ void remove(String setting) { ensureOpen(); - byte[] oldEntry = entries.get().remove(setting); + Entry oldEntry = entries.get().remove(setting); if (oldEntry != null) { - Arrays.fill(oldEntry, (byte)0); + Arrays.fill(oldEntry.bytes, (byte)0); } } @@ -590,8 +613,8 @@ private void ensureOpen() { public synchronized void close() { this.closed = true; if (null != entries.get() && entries.get().isEmpty() == false) { - for (byte[] entry : entries.get().values()) { - Arrays.fill(entry, (byte) 0); + for (Entry entry : entries.get().values()) { + Arrays.fill(entry.bytes, (byte) 0); } } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 33f4718aa45e4..e022e4e3760a5 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -37,7 +37,7 @@ public abstract class SecureSetting extends Setting { /** Determines whether legacy settings with sensitive values should be allowed. */ private static final boolean ALLOW_INSECURE_SETTINGS = Booleans.parseBoolean(System.getProperty("es.allow_insecure_settings", "false")); - private static final Set ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated); + private static final Set ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated, Property.Consistent); private static final Property[] FIXED_PROPERTIES = { Property.NodeScope @@ -97,6 +97,23 @@ public T get(Settings settings) { } } + /** + * Returns the digest of this secure setting's value or {@code null} if the setting is missing (inside the keystore). This method can be + * called even after the {@code SecureSettings} have been closed, unlike {@code #get(Settings)}. The digest is used to check for changes + * of the value (by re-reading the {@code SecureSettings}), without actually transmitting the value to compare with. 
+ */ + public byte[] getSecretDigest(Settings settings) { + final SecureSettings secureSettings = settings.getSecureSettings(); + if (secureSettings == null || false == secureSettings.getSettingNames().contains(getKey())) { + return null; + } + try { + return secureSettings.getSHA256Digest(getKey()); + } catch (GeneralSecurityException e) { + throw new RuntimeException("failed to read secure setting " + getKey(), e); + } + } + /** Returns the secret setting from the keyStoreReader store. */ abstract T getSecret(SecureSettings secureSettings) throws GeneralSecurityException; diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSettings.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSettings.java index 98f980c1ec6c8..7f92b382dd7b1 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSettings.java @@ -42,6 +42,8 @@ public interface SecureSettings extends Closeable { /** Return a file setting. The {@link InputStream} should be closed once it is used. */ InputStream getFile(String setting) throws GeneralSecurityException; + byte[] getSHA256Digest(String setting) throws GeneralSecurityException; + @Override void close() throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 1e5079124c345..d4164b474de0b 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -112,6 +112,11 @@ public enum Property { */ NodeScope, + /** + * Secure setting values equal on all nodes + */ + Consistent, + /** * Index scope */ @@ -167,6 +172,7 @@ private Setting(Key key, @Nullable Setting fallbackSetting, Function properties, } } + private void checkPropertyRequiresNodeScope(final EnumSet properties, final Property property) { + if (properties.contains(property) && properties.contains(Property.NodeScope) == false) { + throw new IllegalArgumentException("non-node-scoped setting [" + key + "] can not have property [" + property + "]"); + } + } + /** * Creates a new Setting instance * @param key the settings key for this setting. @@ -321,6 +333,14 @@ public boolean hasNodeScope() { return properties.contains(Property.NodeScope); } + /** + * Returns true if this setting's value can be checked for equality across all nodes. Only {@link SecureSetting} instances + * may have this qualifier. 
+ */ + public boolean isConsistent() { + return properties.contains(Property.Consistent); + } + /** * Returns true if this setting has an index scope, otherwise false */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index 5789abf76d8b0..e92d53f801183 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -1324,15 +1324,20 @@ public Set getSettingNames() { } @Override - public SecureString getString(String setting) throws GeneralSecurityException{ + public SecureString getString(String setting) throws GeneralSecurityException { return delegate.getString(addPrefix.apply(setting)); } @Override - public InputStream getFile(String setting) throws GeneralSecurityException{ + public InputStream getFile(String setting) throws GeneralSecurityException { return delegate.getFile(addPrefix.apply(setting)); } + @Override + public byte[] getSHA256Digest(String setting) throws GeneralSecurityException { + return delegate.getSHA256Digest(addPrefix.apply(setting)); + } + @Override public void close() throws IOException { delegate.close(); diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 6a78e81d7f3f4..58c9cbc520456 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -49,6 +49,7 @@ public class SettingsModule implements Module { private final Set settingsFilterPattern = new HashSet<>(); private final Map> nodeSettings = new HashMap<>(); private final Map> indexSettings = new HashMap<>(); + private final Set> consistentSettings = new HashSet<>(); private final IndexScopedSettings indexScopedSettings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; @@ -157,7 +158,6 @@ public void configure(Binder binder) { binder.bind(IndexScopedSettings.class).toInstance(indexScopedSettings); } - /** * Registers a new setting. This method should be used by plugins in order to expose any custom settings the plugin defines. * Unless a setting is registered the setting is unusable. 
If a setting is never the less specified the node will reject @@ -175,6 +175,19 @@ private void registerSetting(Setting setting) { if (existingSetting != null) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } + if (setting.isConsistent()) { + if (setting instanceof Setting.AffixSetting) { + if (((Setting.AffixSetting)setting).getConcreteSettingForNamespace("_na_") instanceof SecureSetting) { + consistentSettings.add(setting); + } else { + throw new IllegalArgumentException("Invalid consistent secure setting [" + setting.getKey() + "]"); + } + } else if (setting instanceof SecureSetting) { + consistentSettings.add(setting); + } else { + throw new IllegalArgumentException("Invalid consistent secure setting [" + setting.getKey() + "]"); + } + } nodeSettings.put(setting.getKey(), setting); } if (setting.hasIndexScope()) { @@ -182,6 +195,9 @@ private void registerSetting(Setting setting) { if (existingSetting != null) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } + if (setting.isConsistent()) { + throw new IllegalStateException("Consistent setting [" + setting.getKey() + "] cannot be index scoped"); + } indexSettings.put(setting.getKey(), setting); } } else { @@ -215,6 +231,10 @@ public ClusterSettings getClusterSettings() { return clusterSettings; } + public Set> getConsistentSettings() { + return consistentSettings; + } + public SettingsFilter getSettingsFilter() { return settingsFilter; } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 3fab82c3e9eb9..8b8c051eef9a0 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -74,6 +74,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.ConsistentSettingsService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.SettingUpgrader; @@ -363,6 +364,9 @@ protected Node( final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool); clusterService.addStateApplier(scriptModule.getScriptService()); resourcesToClose.add(clusterService); + clusterService.addLocalNodeMasterListener( + new ConsistentSettingsService(settings, clusterService, settingsModule.getConsistentSettings()) + .newHashPublisher()); final IngestService ingestService = new IngestService(clusterService, threadPool, this.environment, scriptModule.getScriptService(), analysisModule.getAnalysisRegistry(), pluginsService.filterPlugins(IngestPlugin.class)); final DiskThresholdMonitor listener = new DiskThresholdMonitor(settings, clusterService::state, diff --git a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsIT.java b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsIT.java new file mode 100644 index 0000000000000..4ee0c6849c2bc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsIT.java @@ -0,0 +1,188 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting.AffixSetting; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +public class ConsistentSettingsIT extends ESIntegTestCase { + + static final Setting DUMMY_STRING_CONSISTENT_SETTING = SecureSetting + .secureString("dummy.consistent.secure.string.setting", null, Setting.Property.Consistent); + static final AffixSetting DUMMY_AFFIX_STRING_CONSISTENT_SETTING = Setting.affixKeySetting( + "dummy.consistent.secure.string.affix.setting.", "suffix", + key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); + private final AtomicReference> nodeSettingsOverride = new AtomicReference<>(null); + + public void testAllConsistentOnAllNodesSuccess() throws Exception { + for (String nodeName : internalCluster().getNodeNames()) { + Environment environment = internalCluster().getInstance(Environment.class, nodeName); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); + assertTrue("Empty settings list always consistent.", + new ConsistentSettingsService(environment.settings(), clusterService, Collections.emptyList()).areAllConsistent()); + assertTrue( + "Simple consistent secure setting is consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertTrue( + "Affix consistent secure setting is consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertTrue("All secure settings are consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + List.of(DUMMY_STRING_CONSISTENT_SETTING, DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + } + } + + public void testConsistencyFailures() throws Exception { + nodeSettingsOverride.set(nodeOrdinal -> { + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); + MockSecureSettings secureSettings = new MockSecureSettings(); + if (randomBoolean()) { + // different value + secureSettings.setString("dummy.consistent.secure.string.setting", "DIFFERENT_VALUE"); + } else { + // missing value + // 
secureSettings.setString("dummy.consistent.secure.string.setting", "string_value"); + } + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix1" + ".suffix", "affix_value_1"); + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix2" + ".suffix", "affix_value_2"); + assert builder.getSecureSettings() == null : "Deal with the settings merge"; + builder.setSecureSettings(secureSettings); + return builder.build(); + }); + String newNodeName = internalCluster().startNode(); + Environment environment = internalCluster().getInstance(Environment.class, newNodeName); + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, newNodeName); + assertTrue("Empty settings list always consistent.", + new ConsistentSettingsService(environment.settings(), clusterService, Collections.emptyList()).areAllConsistent()); + assertFalse( + "Simple consistent secure setting is NOT consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertTrue( + "Affix consistent secure setting is consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertFalse("All secure settings are NOT consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + List.of(DUMMY_STRING_CONSISTENT_SETTING, DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + nodeSettingsOverride.set(nodeOrdinal -> { + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("dummy.consistent.secure.string.setting", "string_value"); + if (randomBoolean()) { + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix1" + ".suffix", "affix_value_1"); + if (randomBoolean()) { + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix2" + ".suffix", "DIFFERENT_VALUE"); + } else { + // missing value + // "dummy.consistent.secure.string.affix.setting.affix2.suffix" + } + } else { + if (randomBoolean()) { + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix1" + ".suffix", "DIFFERENT_VALUE_1"); + secureSettings.setString("dummy.consistent.secure.string.affix.setting." 
+ "affix2" + ".suffix", "DIFFERENT_VALUE_2"); + } else { + // missing values + // dummy.consistent.secure.string.affix.setting.affix1.suffix + // dummy.consistent.secure.string.affix.setting.affix2.suffix + } + } + assert builder.getSecureSettings() == null : "Deal with the settings merge"; + builder.setSecureSettings(secureSettings); + return builder.build(); + }); + newNodeName = internalCluster().startNode(); + environment = internalCluster().getInstance(Environment.class, newNodeName); + clusterService = internalCluster().getInstance(ClusterService.class, newNodeName); + assertTrue("Empty settings list always consistent.", + new ConsistentSettingsService(environment.settings(), clusterService, Collections.emptyList()).areAllConsistent()); + assertTrue( + "Simple consistent secure setting is consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertFalse( + "Affix consistent secure setting is NOT consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + Collections.singletonList(DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + assertFalse("All secure settings are NOT consistent [" + clusterService.state().metaData().hashesOfConsistentSettings() + "].", + new ConsistentSettingsService(environment.settings(), clusterService, + List.of(DUMMY_STRING_CONSISTENT_SETTING, DUMMY_AFFIX_STRING_CONSISTENT_SETTING)).areAllConsistent()); + nodeSettingsOverride.set(null); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + Function nodeSettingsOverrideFunction = nodeSettingsOverride.get(); + if (nodeSettingsOverrideFunction != null) { + final Settings overrideSettings = nodeSettingsOverrideFunction.apply(nodeOrdinal); + if (overrideSettings != null) { + return overrideSettings; + } + } + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("dummy.consistent.secure.string.setting", "string_value"); + secureSettings.setString("dummy.consistent.secure.string.affix.setting." + "affix1" + ".suffix", "affix_value_1"); + secureSettings.setString("dummy.consistent.secure.string.affix.setting." 
+ "affix2" + ".suffix", "affix_value_2"); + assert builder.getSecureSettings() == null : "Deal with the settings merge"; + builder.setSecureSettings(secureSettings); + return builder.build(); + } + + @Override + protected Collection> nodePlugins() { + Collection> classes = new ArrayList<>(super.nodePlugins()); + classes.add(DummyPlugin.class); + return classes; + } + + public static final class DummyPlugin extends Plugin { + + public DummyPlugin() { + } + + @Override + public List> getSettings() { + List> settings = new ArrayList<>(super.getSettings()); + settings.add(DUMMY_STRING_CONSISTENT_SETTING); + settings.add(DUMMY_AFFIX_STRING_CONSISTENT_SETTING); + return settings; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java new file mode 100644 index 0000000000000..687b74e3397cb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/settings/ConsistentSettingsServiceTests.java @@ -0,0 +1,159 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import org.mockito.stubbing.Answer; + +import java.util.List; +import java.util.Locale; +import java.util.concurrent.atomic.AtomicReference; + +import static org.mockito.Mockito.mock; +import static org.hamcrest.Matchers.is; + +public class ConsistentSettingsServiceTests extends ESTestCase { + + private AtomicReference clusterState = new AtomicReference<>(); + private ClusterService clusterService; + + @Before + public void init() throws Exception { + clusterState.set(ClusterState.EMPTY_STATE); + clusterService = mock(ClusterService.class); + Mockito.doAnswer((Answer) invocation -> { + return clusterState.get(); + }).when(clusterService).state(); + Mockito.doAnswer((Answer) invocation -> { + final ClusterStateUpdateTask arg0 = (ClusterStateUpdateTask) invocation.getArguments()[1]; + this.clusterState.set(arg0.execute(this.clusterState.get())); + return null; + }).when(clusterService).submitStateUpdateTask(Mockito.isA(String.class), Mockito.isA(ClusterStateUpdateTask.class)); + } + + public void testSingleStringSetting() throws Exception { + Setting stringSetting = SecureSetting.secureString("test.simple.foo", null, Setting.Property.Consistent); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(stringSetting.getKey(), "somethingsecure"); + secureSettings.setString("test.noise.setting", "noise"); + Settings.Builder builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + Settings settings = builder.build(); + // hashes not yet published + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); + // publish + new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); + ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)); + assertThat(consistentService.areAllConsistent(), is(true)); + // change value + secureSettings.setString(stringSetting.getKey(), "_TYPO_somethingsecure"); + assertThat(consistentService.areAllConsistent(), is(false)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); + // publish change + new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); + assertThat(consistentService.areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); + } + + public void testSingleAffixSetting() throws Exception { + Setting.AffixSetting affixStringSetting = Setting.affixKeySetting("test.affix.", "bar", + (key) -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); + // add two affix settings to the keystore + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("test.noise.setting", "noise"); + secureSettings.setString("test.affix.first.bar", "first_secure"); + secureSettings.setString("test.affix.second.bar", "second_secure"); + Settings.Builder builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + Settings settings = 
builder.build(); + // hashes not yet published + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); + // publish + new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); + ConsistentSettingsService consistentService = new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)); + assertThat(consistentService.areAllConsistent(), is(true)); + // change value + secureSettings.setString("test.affix.second.bar", "_TYPO_second_secure"); + assertThat(consistentService.areAllConsistent(), is(false)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); + // publish change + new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); + assertThat(consistentService.areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); + // add value + secureSettings.setString("test.affix.third.bar", "third_secure"); + builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + settings = builder.build(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); + // publish + new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); + // remove value + secureSettings = new MockSecureSettings(); + secureSettings.setString("test.another.noise.setting", "noise"); + // missing value test.affix.first.bar + secureSettings.setString("test.affix.second.bar", "second_secure"); + secureSettings.setString("test.affix.third.bar", "third_secure"); + builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + settings = builder.build(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(false)); + } + + public void testStringAndAffixSettings() throws Exception { + Setting stringSetting = SecureSetting.secureString("mock.simple.foo", null, Setting.Property.Consistent); + Setting.AffixSetting affixStringSetting = Setting.affixKeySetting("mock.affix.", "bar", + (key) -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(randomAlphaOfLength(8).toLowerCase(Locale.ROOT), "noise"); + secureSettings.setString(stringSetting.getKey(), "somethingsecure"); + secureSettings.setString("mock.affix.foo.bar", "another_secure"); + Settings.Builder builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + Settings settings = builder.build(); + // hashes not yet published + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting, affixStringSetting)).areAllConsistent(), + is(false)); + // publish only the simple string setting + new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).newHashPublisher().onMaster(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, 
List.of(affixStringSetting)).areAllConsistent(), is(false)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting, affixStringSetting)).areAllConsistent(), + is(false)); + // publish only the affix string setting + new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).newHashPublisher().onMaster(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(false)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting, affixStringSetting)).areAllConsistent(), + is(false)); + // publish both settings + new ConsistentSettingsService(settings, clusterService, List.of(stringSetting, affixStringSetting)).newHashPublisher().onMaster(); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting)).areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(affixStringSetting)).areAllConsistent(), is(true)); + assertThat(new ConsistentSettingsService(settings, clusterService, List.of(stringSetting, affixStringSetting)).areAllConsistent(), + is(true)); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index c6182eac8f680..c374984eb5d15 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -21,11 +21,13 @@ import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.Setting.Property; +import org.hamcrest.Matchers; import java.util.Arrays; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; public class SettingsModuleTests extends ModuleTestCase { @@ -85,6 +87,40 @@ public void testRegisterSettings() { } } + public void testRegisterConsistentSettings() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("some.custom.secure.consistent.setting", "secure_value"); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Setting concreteConsistentSetting = SecureSetting.secureString("some.custom.secure.consistent.setting", null, + Setting.Property.Consistent); + SettingsModule module = new SettingsModule(settings, concreteConsistentSetting); + assertInstanceBinding(module, Settings.class, (s) -> s == settings); + assertThat(module.getConsistentSettings(), Matchers.containsInAnyOrder(concreteConsistentSetting)); + + final Setting concreteUnsecureConsistentSetting = Setting.simpleString("some.custom.UNSECURE.consistent.setting", + Property.Consistent, Property.NodeScope); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new SettingsModule(Settings.builder().build(), concreteUnsecureConsistentSetting)); + assertThat(e.getMessage(), is("Invalid consistent secure setting [some.custom.UNSECURE.consistent.setting]")); + + secureSettings = new MockSecureSettings(); + secureSettings.setString("some.custom.secure.consistent.afix.wow.setting", "secure_value"); + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings).build(); + final 
Setting afixConcreteConsistentSetting = Setting.affixKeySetting( + "some.custom.secure.consistent.afix.", "setting", + key -> SecureSetting.secureString(key, null, Setting.Property.Consistent)); + module = new SettingsModule(settings2,afixConcreteConsistentSetting); + assertInstanceBinding(module, Settings.class, (s) -> s == settings2); + assertThat(module.getConsistentSettings(), Matchers.containsInAnyOrder(afixConcreteConsistentSetting)); + + final Setting concreteUnsecureConsistentAfixSetting = Setting.affixKeySetting( + "some.custom.secure.consistent.afix.", "setting", + key -> Setting.simpleString(key, Setting.Property.Consistent, Property.NodeScope)); + e = expectThrows(IllegalArgumentException.class, + () -> new SettingsModule(Settings.builder().build(), concreteUnsecureConsistentAfixSetting)); + assertThat(e.getMessage(), is("Invalid consistent secure setting [some.custom.secure.consistent.afix.*.setting]")); + } + public void testLoggerSettings() { { Settings settings = Settings.builder().put("logger._root", "TRACE").put("logger.transport", "INFO").build(); diff --git a/test/framework/src/main/java/org/elasticsearch/common/settings/MockSecureSettings.java b/test/framework/src/main/java/org/elasticsearch/common/settings/MockSecureSettings.java index 3a6161a9f7fa0..84689cf223d20 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/settings/MockSecureSettings.java +++ b/test/framework/src/main/java/org/elasticsearch/common/settings/MockSecureSettings.java @@ -19,9 +19,12 @@ package org.elasticsearch.common.settings; +import org.elasticsearch.common.hash.MessageDigests; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -35,6 +38,7 @@ public class MockSecureSettings implements SecureSettings { private Map secureStrings = new HashMap<>(); private Map files = new HashMap<>(); + private Map sha256Digests = new HashMap<>(); private Set settingNames = new HashSet<>(); private final AtomicBoolean closed = new AtomicBoolean(false); @@ -44,6 +48,7 @@ public MockSecureSettings() { private MockSecureSettings(MockSecureSettings source) { secureStrings.putAll(source.secureStrings); files.putAll(source.files); + sha256Digests.putAll(source.sha256Digests); settingNames.addAll(source.settingNames); } @@ -69,15 +74,22 @@ public InputStream getFile(String setting) { return new ByteArrayInputStream(files.get(setting)); } + @Override + public byte[] getSHA256Digest(String setting) { + return sha256Digests.get(setting); + } + public void setString(String setting, String value) { ensureOpen(); secureStrings.put(setting, new SecureString(value.toCharArray())); + sha256Digests.put(setting, MessageDigests.sha256().digest(value.getBytes(StandardCharsets.UTF_8))); settingNames.add(setting); } public void setFile(String setting, byte[] value) { ensureOpen(); files.put(setting, value); + sha256Digests.put(setting, MessageDigests.sha256().digest(value)); settingNames.add(setting); } @@ -90,6 +102,7 @@ public void merge(MockSecureSettings secureSettings) { } settingNames.addAll(secureSettings.settingNames); secureStrings.putAll(secureSettings.secureStrings); + sha256Digests.putAll(secureSettings.sha256Digests); files.putAll(secureSettings.files); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java index c2a079e519f0f..c6c041a6571b1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.SecureString; @@ -179,12 +180,13 @@ private static SecureSettings extractSecureSettings(Settings source, List cache = new HashMap<>(); + final Map> cache = new HashMap<>(); if (sourceSecureSettings != null && securePluginSettings != null) { for (final String settingKey : sourceSecureSettings.getSettingNames()) { for (final Setting secureSetting : securePluginSettings) { if (secureSetting.match(settingKey)) { - cache.put(settingKey, sourceSecureSettings.getString(settingKey)); + cache.put(settingKey, + new Tuple<>(sourceSecureSettings.getString(settingKey), sourceSecureSettings.getSHA256Digest(settingKey))); } } } @@ -197,8 +199,8 @@ public boolean isLoaded() { } @Override - public SecureString getString(String setting) throws GeneralSecurityException { - return cache.get(setting); + public SecureString getString(String setting) { + return cache.get(setting).v1(); } @Override @@ -207,10 +209,15 @@ public Set getSettingNames() { } @Override - public InputStream getFile(String setting) throws GeneralSecurityException { + public InputStream getFile(String setting) { throw new IllegalStateException("A NotificationService setting cannot be File."); } + @Override + public byte[] getSHA256Digest(String setting) { + return cache.get(setting).v2(); + } + @Override public void close() throws IOException { } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java index efbefdd640893..0fa05e900e518 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.notification; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.SecureString; @@ -16,6 +17,7 @@ import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.util.Arrays; import java.util.Collections; @@ -247,6 +249,11 @@ public InputStream getFile(String setting) throws GeneralSecurityException { return null; } + @Override + public byte[] getSHA256Digest(String setting) throws GeneralSecurityException { + return MessageDigests.sha256().digest(new String(secureSettingsMap.get(setting)).getBytes(StandardCharsets.UTF_8)); + } + @Override public void close() throws IOException { } From b33ffc1ae06035e934277f17c4b5d9851f607056 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 29 Jun 
From b33ffc1ae06035e934277f17c4b5d9851f607056 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Sat, 29 Jun 2019 08:59:25 -0700
Subject: [PATCH 074/140] Rename Action to ActionType (#43778)

Action is a class that encapsulates meta information about an action that
allows it to be called remotely, specifically the action name and response
type. With recent refactoring, the action class can now be constructed as a
static constant, instead of needing to create a subclass. This makes the old
pattern of creating a singleton INSTANCE both misnamed and lacking a common
placement.

This commit renames Action to ActionType, allowing the old INSTANCE naming
pattern to become TYPE on the transport action itself. ActionType also
conveys that this class is not the action itself, although this change does
not rename any concrete classes, as those will be removed organically as
they are converted to TYPE constants.

relates #34389
---
 .../noop/action/bulk/NoopBulkAction.java | 4 +-- .../noop/action/search/NoopSearchAction.java | 4 +--
 .../ingest/common/GrokProcessorGetAction.java | 4 +-- .../mustache/MultiSearchTemplateAction.java | 4 +--
 .../script/mustache/SearchTemplateAction.java | 4 +-- .../action/PainlessContextAction.java | 4 +--
 .../action/PainlessExecuteAction.java | 4 +-- .../index/rankeval/RankEvalAction.java | 6 ++--
 .../rankeval/RankEvalRequestBuilder.java | 4 +-- .../AbstractBaseReindexRestHandler.java | 4 +--
 .../AbstractBulkByQueryRestHandler.java | 4 +-- .../BulkByScrollParallelizationHelper.java | 8 ++---
 .../index/reindex/RethrottleAction.java | 4 +-- .../reindex/RethrottleRequestBuilder.java | 4 +--
 .../reindex/AsyncBulkByScrollActionTests.java | 4 +-- .../elasticsearch/action/ActionModule.java | 18 +++++------
 .../action/ActionRequestBuilder.java | 4 +-- .../action/{Action.java => ActionType.java} | 8 ++---
 ...java => StreamableResponseActionType.java} | 6 ++-- .../action/TransportActionNodeProxy.java | 4 +--
 .../ClusterAllocationExplainAction.java | 6 ++-- .../AddVotingConfigExclusionsAction.java | 4 +--
 .../ClearVotingConfigExclusionsAction.java | 4 +-- .../cluster/health/ClusterHealthAction.java | 4 +--
 .../hotthreads/NodesHotThreadsAction.java | 4 +-- .../cluster/node/info/NodesInfoAction.java | 4 +--
 .../NodesReloadSecureSettingsAction.java | 4 +-- .../cluster/node/stats/NodesStatsAction.java | 4 +--
 .../node/tasks/cancel/CancelTasksAction.java | 6 ++-- .../cluster/node/tasks/get/GetTaskAction.java | 6 ++--
 .../tasks/get/TransportGetTaskAction.java | 2 +- .../node/tasks/list/ListTasksAction.java | 6 ++--
 .../cluster/node/usage/NodesUsageAction.java | 4 +-- .../node/usage/NodesUsageRequestBuilder.java | 4 +--
 .../cluster/remote/RemoteInfoAction.java | 4 +-- .../delete/DeleteRepositoryAction.java | 4 +--
 .../get/GetRepositoriesAction.java | 4 +-- .../repositories/put/PutRepositoryAction.java | 4 +--
 .../verify/VerifyRepositoryAction.java | 4 +-- .../cluster/reroute/ClusterRerouteAction.java | 4 +--
 .../settings/ClusterUpdateSettingsAction.java | 4 +-- .../shards/ClusterSearchShardsAction.java | 4 +--
 .../create/CreateSnapshotAction.java | 4 +-- .../delete/DeleteSnapshotAction.java | 4 +--
 .../snapshots/get/GetSnapshotsAction.java | 4 +-- .../restore/RestoreSnapshotAction.java | 4 +--
 .../status/SnapshotsStatusAction.java | 4 +-- .../cluster/state/ClusterStateAction.java | 4 +--
 .../cluster/stats/ClusterStatsAction.java | 4 +-- .../DeleteStoredScriptAction.java | 4 +--
 .../storedscripts/GetStoredScriptAction.java | 4 +-- .../storedscripts/PutStoredScriptAction.java | 4 +--
 .../tasks/PendingClusterTasksAction.java | 4 +-- .../indices/alias/IndicesAliasesAction.java | 4 +--
.../alias/get/BaseAliasesRequestBuilder.java | 4 +-- .../indices/alias/get/GetAliasesAction.java | 4 +-- .../admin/indices/analyze/AnalyzeAction.java | 4 +-- .../cache/clear/ClearIndicesCacheAction.java | 4 +-- .../admin/indices/close/CloseIndexAction.java | 4 +-- .../indices/create/CreateIndexAction.java | 4 +-- .../indices/delete/DeleteIndexAction.java | 4 +-- .../admin/indices/flush/FlushAction.java | 4 +-- .../indices/flush/SyncedFlushAction.java | 4 +-- .../indices/forcemerge/ForceMergeAction.java | 4 +-- .../admin/indices/get/GetIndexAction.java | 4 +-- .../mapping/get/GetFieldMappingsAction.java | 4 +-- .../mapping/get/GetMappingsAction.java | 4 +-- .../get/TransportGetFieldMappingsAction.java | 2 +- .../TransportGetFieldMappingsIndexAction.java | 4 +-- .../indices/mapping/put/PutMappingAction.java | 4 +-- .../admin/indices/open/OpenIndexAction.java | 4 +-- .../indices/recovery/RecoveryAction.java | 4 +-- .../admin/indices/refresh/RefreshAction.java | 4 +-- .../indices/rollover/RolloverAction.java | 4 +-- .../segments/IndicesSegmentsAction.java | 4 +-- .../settings/get/GetSettingsAction.java | 4 +-- .../settings/put/UpdateSettingsAction.java | 4 +-- .../IndicesShardStoreRequestBuilder.java | 4 +-- .../shards/IndicesShardStoresAction.java | 6 ++-- .../admin/indices/shrink/ResizeAction.java | 4 +-- .../indices/shrink/ResizeRequestBuilder.java | 4 +-- .../admin/indices/shrink/ShrinkAction.java | 4 +-- .../indices/stats/IndicesStatsAction.java | 4 +-- .../delete/DeleteIndexTemplateAction.java | 4 +-- .../template/get/GetIndexTemplatesAction.java | 4 +-- .../template/put/PutIndexTemplateAction.java | 4 +-- .../upgrade/get/UpgradeStatusAction.java | 4 +-- .../indices/upgrade/post/UpgradeAction.java | 4 +-- .../upgrade/post/UpgradeSettingsAction.java | 4 +-- .../validate/query/ValidateQueryAction.java | 4 +-- .../elasticsearch/action/bulk/BulkAction.java | 4 +-- .../action/bulk/TransportBulkAction.java | 2 +- .../action/bulk/TransportShardBulkAction.java | 6 ++-- .../action/delete/DeleteAction.java | 4 +-- .../action/explain/ExplainAction.java | 4 +-- .../fieldcaps/FieldCapabilitiesAction.java | 4 +-- .../TransportFieldCapabilitiesAction.java | 2 +- ...TransportFieldCapabilitiesIndexAction.java | 6 ++-- .../elasticsearch/action/get/GetAction.java | 4 +-- .../action/get/MultiGetAction.java | 4 +-- .../action/get/TransportMultiGetAction.java | 2 +- .../get/TransportShardMultiGetAction.java | 4 +-- .../action/index/IndexAction.java | 4 +-- .../action/ingest/DeletePipelineAction.java | 4 +-- .../action/ingest/GetPipelineAction.java | 4 +-- .../action/ingest/IngestActionForwarder.java | 4 +-- .../action/ingest/PutPipelineAction.java | 4 +-- .../action/ingest/SimulatePipelineAction.java | 4 +-- .../elasticsearch/action/main/MainAction.java | 4 +-- .../action/search/ClearScrollAction.java | 4 +-- .../action/search/MultiSearchAction.java | 4 +-- .../action/search/SearchAction.java | 4 +-- .../action/search/SearchScrollAction.java | 4 +-- .../BroadcastOperationRequestBuilder.java | 4 +-- .../master/AcknowledgedRequestBuilder.java | 4 +-- .../MasterNodeOperationRequestBuilder.java | 4 +-- ...MasterNodeReadOperationRequestBuilder.java | 4 +-- .../info/ClusterInfoRequestBuilder.java | 4 +-- .../nodes/NodesOperationRequestBuilder.java | 4 +-- .../ReplicationRequestBuilder.java | 4 +-- .../InstanceShardOperationRequestBuilder.java | 4 +-- .../SingleShardOperationRequestBuilder.java | 4 +-- .../support/tasks/TasksRequestBuilder.java | 4 +-- .../termvectors/MultiTermVectorsAction.java | 4 +-- 
.../action/termvectors/TermVectorsAction.java | 4 +-- .../TransportMultiTermVectorsAction.java | 2 +- .../TransportShardMultiTermsVectorAction.java | 6 ++-- .../action/update/UpdateAction.java | 4 +-- .../client/ElasticsearchClient.java | 10 +++---- .../elasticsearch/client/FilterClient.java | 4 +-- .../client/OriginSettingClient.java | 4 +-- .../client/ParentTaskAssigningClient.java | 4 +-- .../elasticsearch/client/node/NodeClient.java | 30 +++++++++---------- .../client/support/AbstractClient.java | 18 +++++------ .../AbstractBulkByScrollRequestBuilder.java | 4 +-- ...stractBulkIndexByScrollRequestBuilder.java | 6 ++-- .../index/reindex/DeleteByQueryAction.java | 4 +-- .../reindex/DeleteByQueryRequestBuilder.java | 6 ++-- .../index/reindex/ReindexAction.java | 4 +-- .../index/reindex/ReindexRequestBuilder.java | 6 ++-- .../index/reindex/UpdateByQueryAction.java | 4 +-- .../reindex/UpdateByQueryRequestBuilder.java | 6 ++-- .../index/seqno/RetentionLeaseActions.java | 8 ++--- .../java/org/elasticsearch/node/Node.java | 4 +-- .../CompletionPersistentTaskAction.java | 6 ++-- .../persistent/PersistentTasksService.java | 4 +-- .../RemovePersistentTaskAction.java | 4 +-- .../persistent/StartPersistentTaskAction.java | 4 +-- .../UpdatePersistentTaskStatusAction.java | 4 +-- .../elasticsearch/plugins/ActionPlugin.java | 14 ++++----- .../transport/RemoteClusterAwareClient.java | 4 +-- .../action/ActionModuleTests.java | 2 +- .../org/elasticsearch/action/ActionTests.java | 2 +- .../cluster/node/tasks/TestTaskPlugin.java | 12 ++++---- .../bulk/TransportBulkActionTookTests.java | 4 +-- .../client/AbstractClientHeadersTestCase.java | 6 ++-- .../client/OriginSettingClientTests.java | 4 +-- .../ParentTaskAssigningClientTests.java | 4 +-- .../client/node/NodeClientHeadersTests.java | 10 +++---- .../InternalOrPrivateSettingsPlugin.java | 4 +-- .../persistent/TestPersistentTasksPlugin.java | 4 +-- .../indices/RestValidateQueryActionTests.java | 4 +-- .../snapshots/SnapshotResiliencyTests.java | 6 ++-- .../elasticsearch/test/client/NoOpClient.java | 4 +-- .../xpack/ccr/CcrLicenseChecker.java | 6 ++-- .../xpack/ccr/action/ShardChangesAction.java | 4 +-- .../bulk/BulkShardOperationsAction.java | 4 +-- .../ClearCcrRestoreSessionAction.java | 4 +-- .../DeleteInternalCcrRepositoryAction.java | 4 +-- .../GetCcrRestoreFileChunkAction.java | 4 +-- .../PutCcrRestoreSessionAction.java | 4 +-- .../PutInternalCcrRepositoryAction.java | 4 +-- .../license/DeleteLicenseAction.java | 4 +-- .../license/GetBasicStatusAction.java | 4 +-- .../license/GetLicenseAction.java | 4 +-- .../license/GetTrialStatusAction.java | 4 +-- .../license/PostStartBasicAction.java | 4 +-- .../license/PostStartTrialAction.java | 4 +-- .../license/PutLicenseAction.java | 4 +-- .../xpack/core/ClientHelper.java | 6 ++-- .../xpack/core/XPackClientPlugin.java | 4 +-- .../elasticsearch/xpack/core/XPackPlugin.java | 6 ++-- .../core/action/ReloadAnalyzerAction.java | 4 +-- .../action/TransportFreezeIndexAction.java | 4 +-- .../xpack/core/action/XPackInfoAction.java | 4 +-- .../core/action/XPackInfoFeatureAction.java | 6 ++-- .../xpack/core/action/XPackUsageAction.java | 4 +-- .../core/action/XPackUsageFeatureAction.java | 6 ++-- .../xpack/core/ccr/action/CcrStatsAction.java | 4 +-- .../action/DeleteAutoFollowPatternAction.java | 4 +-- .../core/ccr/action/FollowInfoAction.java | 4 +-- .../core/ccr/action/FollowStatsAction.java | 4 +-- .../core/ccr/action/ForgetFollowerAction.java | 4 +-- .../action/GetAutoFollowPatternAction.java | 4 +-- 
.../core/ccr/action/PauseFollowAction.java | 4 +-- .../action/PutAutoFollowPatternAction.java | 4 +-- .../core/ccr/action/PutFollowAction.java | 4 +-- .../core/ccr/action/ResumeFollowAction.java | 4 +-- .../xpack/core/ccr/action/UnfollowAction.java | 4 +-- .../DeleteDataFrameTransformAction.java | 4 +-- .../action/GetDataFrameTransformsAction.java | 4 +-- .../GetDataFrameTransformsStatsAction.java | 4 +-- .../PreviewDataFrameTransformAction.java | 4 +-- .../action/PutDataFrameTransformAction.java | 4 +-- .../action/StartDataFrameTransformAction.java | 4 +-- .../StartDataFrameTransformTaskAction.java | 4 +-- .../action/StopDataFrameTransformAction.java | 4 +-- .../deprecation/DeprecationInfoAction.java | 4 +-- .../NodesDeprecationCheckAction.java | 8 ++--- .../core/graph/action/GraphExploreAction.java | 4 +-- .../action/DeleteLifecycleAction.java | 4 +-- .../action/ExplainLifecycleAction.java | 4 +-- .../action/GetLifecycleAction.java | 4 +-- .../action/GetStatusAction.java | 4 +-- .../action/MoveToStepAction.java | 4 +-- .../action/PutLifecycleAction.java | 4 +-- .../RemoveIndexLifecyclePolicyAction.java | 4 +-- .../indexlifecycle/action/RetryAction.java | 4 +-- .../indexlifecycle/action/StartILMAction.java | 4 +-- .../indexlifecycle/action/StopILMAction.java | 4 +-- .../xpack/core/ml/action/CloseJobAction.java | 4 +-- .../core/ml/action/DeleteCalendarAction.java | 4 +-- .../ml/action/DeleteCalendarEventAction.java | 4 +-- .../DeleteDataFrameAnalyticsAction.java | 4 +-- .../core/ml/action/DeleteDatafeedAction.java | 4 +-- .../ml/action/DeleteExpiredDataAction.java | 4 +-- .../core/ml/action/DeleteFilterAction.java | 4 +-- .../core/ml/action/DeleteForecastAction.java | 4 +-- .../xpack/core/ml/action/DeleteJobAction.java | 4 +-- .../ml/action/DeleteModelSnapshotAction.java | 4 +-- .../ml/action/EvaluateDataFrameAction.java | 4 +-- .../ml/action/FinalizeJobExecutionAction.java | 4 +-- .../ml/action/FindFileStructureAction.java | 4 +-- .../xpack/core/ml/action/FlushJobAction.java | 4 +-- .../core/ml/action/ForecastJobAction.java | 4 +-- .../core/ml/action/GetBucketsAction.java | 4 +-- .../ml/action/GetCalendarEventsAction.java | 4 +-- .../core/ml/action/GetCalendarsAction.java | 4 +-- .../core/ml/action/GetCategoriesAction.java | 4 +-- .../action/GetDataFrameAnalyticsAction.java | 4 +-- .../GetDataFrameAnalyticsStatsAction.java | 4 +-- .../core/ml/action/GetDatafeedsAction.java | 4 +-- .../ml/action/GetDatafeedsStatsAction.java | 4 +-- .../core/ml/action/GetFiltersAction.java | 4 +-- .../core/ml/action/GetInfluencersAction.java | 4 +-- .../xpack/core/ml/action/GetJobsAction.java | 4 +-- .../core/ml/action/GetJobsStatsAction.java | 4 +-- .../ml/action/GetModelSnapshotsAction.java | 4 +-- .../ml/action/GetOverallBucketsAction.java | 4 +-- .../core/ml/action/GetRecordsAction.java | 4 +-- .../core/ml/action/IsolateDatafeedAction.java | 4 +-- .../core/ml/action/KillProcessAction.java | 4 +-- .../xpack/core/ml/action/MlInfoAction.java | 4 +-- .../xpack/core/ml/action/OpenJobAction.java | 4 +-- .../core/ml/action/PersistJobAction.java | 4 +-- .../ml/action/PostCalendarEventsAction.java | 4 +-- .../xpack/core/ml/action/PostDataAction.java | 4 +-- .../core/ml/action/PreviewDatafeedAction.java | 4 +-- .../core/ml/action/PutCalendarAction.java | 4 +-- .../action/PutDataFrameAnalyticsAction.java | 4 +-- .../core/ml/action/PutDatafeedAction.java | 4 +-- .../xpack/core/ml/action/PutFilterAction.java | 4 +-- .../xpack/core/ml/action/PutJobAction.java | 4 +-- .../ml/action/RevertModelSnapshotAction.java | 4 
+-- .../core/ml/action/SetUpgradeModeAction.java | 4 +-- .../action/StartDataFrameAnalyticsAction.java | 4 +-- .../core/ml/action/StartDatafeedAction.java | 4 +-- .../action/StopDataFrameAnalyticsAction.java | 4 +-- .../core/ml/action/StopDatafeedAction.java | 4 +-- .../ml/action/UpdateCalendarJobAction.java | 4 +-- .../core/ml/action/UpdateDatafeedAction.java | 4 +-- .../core/ml/action/UpdateFilterAction.java | 4 +-- .../xpack/core/ml/action/UpdateJobAction.java | 4 +-- .../ml/action/UpdateModelSnapshotAction.java | 4 +-- .../core/ml/action/UpdateProcessAction.java | 4 +-- .../ml/action/ValidateDetectorAction.java | 4 +-- .../ml/action/ValidateJobConfigAction.java | 4 +-- .../action/MonitoringBulkAction.java | 4 +-- .../rollup/action/DeleteRollupJobAction.java | 4 +-- .../rollup/action/GetRollupCapsAction.java | 4 +-- .../action/GetRollupIndexCapsAction.java | 4 +-- .../rollup/action/GetRollupJobsAction.java | 4 +-- .../rollup/action/PutRollupJobAction.java | 4 +-- .../rollup/action/RollupSearchAction.java | 4 +-- .../rollup/action/StartRollupJobAction.java | 4 +-- .../rollup/action/StopRollupJobAction.java | 4 +-- .../security/action/CreateApiKeyAction.java | 6 ++-- .../core/security/action/GetApiKeyAction.java | 6 ++-- .../action/InvalidateApiKeyAction.java | 6 ++-- .../oidc/OpenIdConnectAuthenticateAction.java | 6 ++-- .../oidc/OpenIdConnectLogoutAction.java | 4 +-- ...nIdConnectPrepareAuthenticationAction.java | 4 +-- .../privilege/DeletePrivilegesAction.java | 6 ++-- .../action/privilege/GetPrivilegesAction.java | 6 ++-- .../action/privilege/PutPrivilegesAction.java | 6 ++-- .../action/realm/ClearRealmCacheAction.java | 4 +-- .../action/role/ClearRolesCacheAction.java | 4 +-- .../action/role/DeleteRoleAction.java | 6 ++-- .../security/action/role/GetRolesAction.java | 6 ++-- .../security/action/role/PutRoleAction.java | 6 ++-- .../rolemapping/DeleteRoleMappingAction.java | 6 ++-- .../rolemapping/GetRoleMappingsAction.java | 6 ++-- .../rolemapping/PutRoleMappingAction.java | 6 ++-- .../action/saml/SamlAuthenticateAction.java | 6 ++-- .../saml/SamlInvalidateSessionAction.java | 6 ++-- .../action/saml/SamlLogoutAction.java | 6 ++-- .../saml/SamlPrepareAuthenticationAction.java | 6 ++-- .../action/token/CreateTokenAction.java | 6 ++-- .../token/CreateTokenRequestBuilder.java | 4 +-- .../action/token/InvalidateTokenAction.java | 6 ++-- .../action/token/RefreshTokenAction.java | 4 +-- .../action/user/AuthenticateAction.java | 4 +-- .../action/user/ChangePasswordAction.java | 4 +-- .../action/user/DeleteUserAction.java | 6 ++-- .../action/user/GetUserPrivilegesAction.java | 6 ++-- .../security/action/user/GetUsersAction.java | 6 ++-- .../action/user/HasPrivilegesAction.java | 4 +-- .../security/action/user/PutUserAction.java | 6 ++-- .../action/user/SetEnabledAction.java | 4 +-- .../authz/privilege/ApplicationPrivilege.java | 2 +- .../ssl/action/GetCertificateInfoAction.java | 4 +-- .../upgrade/actions/IndexUpgradeAction.java | 4 +-- .../actions/IndexUpgradeInfoAction.java | 4 +-- .../transport/actions/ack/AckWatchAction.java | 4 +-- .../actions/activate/ActivateWatchAction.java | 4 +-- .../actions/delete/DeleteWatchAction.java | 4 +-- .../actions/execute/ExecuteWatchAction.java | 4 +-- .../transport/actions/get/GetWatchAction.java | 4 +-- .../transport/actions/put/PutWatchAction.java | 4 +-- .../actions/service/WatcherServiceAction.java | 4 +-- .../actions/stats/WatcherStatsAction.java | 6 ++-- ...meTransformCheckpointServiceNodeTests.java | 6 ++-- .../transforms/pivot/PivotTests.java | 6 
++-- .../LifecyclePolicySecurityClient.java | 6 ++-- .../persistence/JobDataCountsPersister.java | 2 +-
 .../action/oauth2/RestGetTokenAction.java | 4 +-- ...sportSamlInvalidateSessionActionTests.java | 4 +--
 .../authc/esnative/NativeUsersStoreTests.java | 4 +-- .../store/NativePrivilegeStoreTests.java | 4 +--
 .../apikey/RestCreateApiKeyActionTests.java | 7 ++--- .../apikey/RestGetApiKeyActionTests.java | 7 ++---
 .../RestInvalidateApiKeyActionTests.java | 7 ++--- .../support/SecurityIndexManagerTests.java | 6 ++--
 .../sql/action/SqlClearCursorAction.java | 4 +-- .../xpack/sql/action/SqlQueryAction.java | 4 +--
 .../xpack/sql/action/SqlTranslateAction.java | 4 +-- .../xpack/sql/plugin/SqlStatsAction.java | 4 +--
 347 files changed, 787 insertions(+), 790 deletions(-)
 rename server/src/main/java/org/elasticsearch/action/{Action.java => ActionType.java} (89%)
 rename server/src/main/java/org/elasticsearch/action/{StreamableResponseAction.java => StreamableResponseActionType.java} (85%)

diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java
index 40c65aee70074..e6412099fee72 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java
@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.plugin.noop.action.bulk;
 
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 import org.elasticsearch.action.bulk.BulkResponse;
 
-public class NoopBulkAction extends StreamableResponseAction<BulkResponse> {
+public class NoopBulkAction extends StreamableResponseActionType<BulkResponse> {
     public static final String NAME = "mock:data/write/bulk";
 
     public static final NoopBulkAction INSTANCE = new NoopBulkAction();
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java
index aa316ae435ac3..fb83bda148b11 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java
@@ -18,11 +18,11 @@
  */
 package org.elasticsearch.plugin.noop.action.search;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.io.stream.Writeable;
 
-public class NoopSearchAction extends Action<SearchResponse> {
+public class NoopSearchAction extends ActionType<SearchResponse> {
     public static final NoopSearchAction INSTANCE = new NoopSearchAction();
     public static final String NAME = "mock:data/read/search";
 
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java
index ee2f49390b8db..f6eadab8014d2 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.client.node.NodeClient;
@@ -45,7 +45,7 @@
 import static org.elasticsearch.ingest.common.IngestCommonPlugin.GROK_PATTERNS;
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 
-public class GrokProcessorGetAction extends StreamableResponseAction<GrokProcessorGetAction.Response> {
+public class GrokProcessorGetAction extends StreamableResponseActionType<GrokProcessorGetAction.Response> {
 
     static final GrokProcessorGetAction INSTANCE = new GrokProcessorGetAction();
     static final String NAME = "cluster:admin/ingest/processor/grok/get";
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java
index 573c5888991d1..a096a89951e7c 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java
@@ -19,10 +19,10 @@
 
 package org.elasticsearch.script.mustache;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.common.io.stream.Writeable;
 
-public class MultiSearchTemplateAction extends Action<MultiSearchTemplateResponse> {
+public class MultiSearchTemplateAction extends ActionType<MultiSearchTemplateResponse> {
 
     public static final MultiSearchTemplateAction INSTANCE = new MultiSearchTemplateAction();
     public static final String NAME = "indices:data/read/msearch/template";
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java
index 7bd57154e26fc..2e8417c993990 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java
@@ -19,10 +19,10 @@
 
 package org.elasticsearch.script.mustache;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.common.io.stream.Writeable;
 
-public class SearchTemplateAction extends Action<SearchTemplateResponse> {
+public class SearchTemplateAction extends ActionType<SearchTemplateResponse> {
 
     public static final SearchTemplateAction INSTANCE = new SearchTemplateAction();
     public static final String NAME = "indices:data/read/search/template";
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java
index 4abad4d78af3e..5e941a2d0e2e0 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.painless.action;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
@@ -64,7 +64,7 @@
  * retrieves all available information about the API for this specific context
  */
-public class PainlessContextAction extends Action<PainlessContextAction.Response> {
+public class PainlessContextAction extends ActionType<PainlessContextAction.Response> {
 
     public static final PainlessContextAction INSTANCE = new PainlessContextAction();
     private static final String NAME = "cluster:admin/scripts/painless/context";
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java
index 349a3281b4ca9..d0a5d5cc9611b 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java
@@ -30,7 +30,7 @@
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.RAMDirectory;
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.ActionFilters;
@@ -89,7 +89,7 @@
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
 
-public class PainlessExecuteAction extends Action<PainlessExecuteAction.Response> {
+public class PainlessExecuteAction extends ActionType<PainlessExecuteAction.Response> {
 
     public static final PainlessExecuteAction INSTANCE = new PainlessExecuteAction();
     private static final String NAME = "cluster:admin/scripts/painless/execute";
diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java
index 664377786f87f..07de8c8a22cad 100644
--- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java
+++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java
@@ -19,12 +19,12 @@
 
 package org.elasticsearch.index.rankeval;
 
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 
 /**
- * Action for explaining evaluating search ranking results.
+ * ActionType for explaining evaluating search ranking results.
  */
-public class RankEvalAction extends StreamableResponseAction<RankEvalResponse> {
+public class RankEvalAction extends StreamableResponseActionType<RankEvalResponse> {
 
     public static final RankEvalAction INSTANCE = new RankEvalAction();
     public static final String NAME = "indices:data/read/rank_eval";
diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java
index 4108a817f046e..1bfb576c37911 100644
--- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java
+++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java
@@ -19,13 +19,13 @@
 
 package org.elasticsearch.index.rankeval;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
 
 public class RankEvalRequestBuilder extends ActionRequestBuilder<RankEvalRequest, RankEvalResponse> {
 
-    public RankEvalRequestBuilder(ElasticsearchClient client, Action<RankEvalResponse> action,
+    public RankEvalRequestBuilder(ElasticsearchClient client, ActionType<RankEvalResponse> action,
             RankEvalRequest request) {
         super(client, action, request);
     }
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java
index 8111aac39451b..63451abb7ccd7 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.client.node.NodeClient;
@@ -38,7 +38,7 @@
 
 public abstract class AbstractBaseReindexRestHandler<
                 Request extends AbstractBulkByScrollRequest<Request>,
-                A extends Action<BulkByScrollResponse>
+                A extends ActionType<BulkByScrollResponse>
             > extends BaseRestHandler {
 
     private final A action;
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
index fbfd1007a7e02..3124d9cb0be0c 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
@@ -38,7 +38,7 @@
  */
 public abstract class AbstractBulkByQueryRestHandler<
         Request extends AbstractBulkByScrollRequest<Request>,
-        A extends Action<BulkByScrollResponse>> extends AbstractBaseReindexRestHandler<Request, A> {
+        A extends ActionType<BulkByScrollResponse>> extends AbstractBaseReindexRestHandler<Request, A> {
 
     protected AbstractBulkByQueryRestHandler(Settings settings, A action) {
         super(settings, action);
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java
index dfb8deac58c3b..6df73414e81f3 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
@@ -61,7 +61,7 @@ private BulkByScrollParallelizationHelper() {}
 
     static <Request extends AbstractBulkByScrollRequest<Request>> void startSlicedAction(
             Request request,
             BulkByScrollTask task,
-            Action<BulkByScrollResponse> action,
+            ActionType<BulkByScrollResponse> action,
             ActionListener<BulkByScrollResponse> listener,
             Client client,
             DiscoveryNode node,
@@ -85,7 +85,7 @@ static <Request extends AbstractBulkByScrollRequest<Request>> void startSlicedAc
     private static <Request extends AbstractBulkByScrollRequest<Request>> void sliceConditionally(
             Request request,
             BulkByScrollTask task,
-            Action<BulkByScrollResponse> action,
+            ActionType<BulkByScrollResponse> action,
             ActionListener<BulkByScrollResponse> listener,
             Client client,
             DiscoveryNode node,
@@ -118,7 +118,7 @@ private static int countSlicesBasedOnShards(ClusterSearchShardsResponse response
     private static <Request extends AbstractBulkByScrollRequest<Request>> void sendSubRequests(
             Client client,
-            Action<BulkByScrollResponse> action,
+            ActionType<BulkByScrollResponse> action,
             String localNodeId,
             BulkByScrollTask task,
             Request request,
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java
index 513b4261bdf52..cf04d6d856ddb 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java
@@ -19,11 +19,11 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.common.io.stream.Writeable;
 
-public class RethrottleAction extends Action<ListTasksResponse> {
+public class RethrottleAction extends ActionType<ListTasksResponse> {
     public static final RethrottleAction INSTANCE = new RethrottleAction();
     public static final String NAME = "cluster:admin/reindex/rethrottle";
 
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java
index 25407e6dc93d5..648eb6e441b1a 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.support.tasks.TasksRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -29,7 +29,7 @@
  */
 public class RethrottleRequestBuilder extends TasksRequestBuilder<RethrottleRequest, ListTasksResponse, RethrottleRequestBuilder> {
     public RethrottleRequestBuilder(ElasticsearchClient client,
-                                    Action<ListTasksResponse> action) {
+                                    ActionType<ListTasksResponse> action) {
         super(client, action, new RethrottleRequest());
     }
 
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
index bdedc65b7a6d3..3d28ce3bcbc96 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java
@@ -23,7 +23,7 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
@@ -761,7 +761,7 @@ private class MyMockClient extends FilterClient {
         @Override
         @SuppressWarnings("unchecked")
         protected <Request extends ActionRequest, Response extends ActionResponse>
-        void doExecute(Action<Response> action, Request request, ActionListener<Response> listener) {
+        void doExecute(ActionType<Response> action, Request request, ActionListener<Response> listener) {
             if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) {
                 listener.onFailure(
                     new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders()));
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index 401f777cdb7cd..f871d972fc00b 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -412,7 +412,7 @@ public void register(ActionHandler<?, ?> handler) {
         }
 
         public <Request extends ActionRequest, Response extends ActionResponse> void register(
-            Action<Response> action, Class<? extends TransportAction<Request, Response>> transportAction) {
+            ActionType<Response> action, Class<? extends TransportAction<Request, Response>> transportAction) {
             register(new ActionHandler<>(action, transportAction));
         }
     }
@@ -460,7 +460,7 @@ public <Request extends ActionRequest, Response extends ActionResponse> void reg
         actions.register(CloseIndexAction.INSTANCE, TransportCloseIndexAction.class);
         actions.register(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class);
         actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class);
-        actions.register(TransportGetFieldMappingsIndexAction.ACTION_INSTANCE, TransportGetFieldMappingsIndexAction.class);
+        actions.register(TransportGetFieldMappingsIndexAction.TYPE, TransportGetFieldMappingsIndexAction.class);
         actions.register(PutMappingAction.INSTANCE, TransportPutMappingAction.class);
         actions.register(IndicesAliasesAction.INSTANCE, TransportIndicesAliasesAction.class);
         actions.register(UpdateSettingsAction.INSTANCE, TransportUpdateSettingsAction.class);
@@ -484,13 +484,13 @@ public <Request extends ActionRequest, Response extends ActionResponse> void reg
         actions.register(GetAction.INSTANCE, TransportGetAction.class);
         actions.register(TermVectorsAction.INSTANCE, TransportTermVectorsAction.class);
         actions.register(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class);
-        actions.register(TransportShardMultiTermsVectorAction.ACTION_INSTANCE, TransportShardMultiTermsVectorAction.class);
+        actions.register(TransportShardMultiTermsVectorAction.TYPE, TransportShardMultiTermsVectorAction.class);
         actions.register(DeleteAction.INSTANCE, TransportDeleteAction.class);
         actions.register(UpdateAction.INSTANCE, TransportUpdateAction.class);
         actions.register(MultiGetAction.INSTANCE, TransportMultiGetAction.class);
-        actions.register(TransportShardMultiGetAction.ACTION_INSTANCE, TransportShardMultiGetAction.class);
+        actions.register(TransportShardMultiGetAction.TYPE, TransportShardMultiGetAction.class);
         actions.register(BulkAction.INSTANCE, TransportBulkAction.class);
-        actions.register(TransportShardBulkAction.ACTION_INSTANCE, TransportShardBulkAction.class);
+        actions.register(TransportShardBulkAction.TYPE, TransportShardBulkAction.class);
         actions.register(SearchAction.INSTANCE, TransportSearchAction.class);
         actions.register(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class);
         actions.register(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class);
@@ -505,7 +505,7 @@ public <Request extends ActionRequest, Response extends ActionResponse> void reg
         actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class);
 
         actions.register(FieldCapabilitiesAction.INSTANCE, TransportFieldCapabilitiesAction.class);
-        actions.register(TransportFieldCapabilitiesIndexAction.ACTION_INSTANCE, TransportFieldCapabilitiesIndexAction.class);
+        actions.register(TransportFieldCapabilitiesIndexAction.TYPE, TransportFieldCapabilitiesIndexAction.class);
 
         actions.register(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class);
         actions.register(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class);
@@ -684,10 +684,10 @@ protected void configure() {
         bind(AutoCreateIndex.class).toInstance(autoCreateIndex);
         bind(TransportLivenessAction.class).asEagerSingleton();
 
-        // register Action -> transportAction Map used by NodeClient
+        // register ActionType -> transportAction Map used by NodeClient
         @SuppressWarnings("rawtypes")
-        MapBinder<Action, TransportAction> transportActionsBinder
-                = MapBinder.newMapBinder(binder(), Action.class, TransportAction.class);
+        MapBinder<ActionType, TransportAction> transportActionsBinder
+                = MapBinder.newMapBinder(binder(), ActionType.class, TransportAction.class);
         for (ActionHandler<?, ?> action : actions.values()) {
             // bind the action as eager singleton, so the map binder one will reuse it
             bind(action.getTransportAction()).asEagerSingleton();
diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
index a3aa8ac2a5222..166bec9e065b0 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
@@ -26,11 +26,11 @@
 
 public abstract class ActionRequestBuilder<Request extends ActionRequest, Response extends ActionResponse> {
 
-    protected final Action<Response> action;
+    protected final ActionType<Response> action;
     protected final Request request;
     protected final ElasticsearchClient client;
 
-    protected ActionRequestBuilder(ElasticsearchClient client, Action<Response> action, Request request) {
+    protected ActionRequestBuilder(ElasticsearchClient client, ActionType<Response> action, Request request) {
         Objects.requireNonNull(action, "action must not be null");
         this.action = action;
         this.request = request;
diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/ActionType.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/action/Action.java
rename to server/src/main/java/org/elasticsearch/action/ActionType.java
index 0037533797d27..02f8f3c6fc22f 100644
--- a/server/src/main/java/org/elasticsearch/action/Action.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionType.java
@@ -26,7 +26,7 @@
 /**
  * A generic action. Should strive to make it a singleton.
  */
-public class Action<Response extends ActionResponse> {
+public class ActionType<Response extends ActionResponse> {
 
     private final String name;
     private final Writeable.Reader<Response> responseReader;
@@ -36,7 +36,7 @@ public class Action<Response extends ActionResponse> {
      * @deprecated Pass a {@link Writeable.Reader} with {@link }
      */
     @Deprecated
-    protected Action(String name) {
+    protected ActionType(String name) {
         this(name, null);
     }
 
@@ -44,7 +44,7 @@ protected Action(String name) {
     /**
      * @param name The name of the action, must be unique across actions.
      * @param responseReader A reader for the response type
     */
-    public Action(String name, Writeable.Reader<Response> responseReader) {
+    public ActionType(String name, Writeable.Reader<Response> responseReader) {
         this.name = name;
         this.responseReader = responseReader;
     }
@@ -72,7 +72,7 @@ public TransportRequestOptions transportOptions(Settings settings) {
 
     @Override
     public boolean equals(Object o) {
-        return o instanceof Action && name.equals(((Action) o).name());
+        return o instanceof ActionType && name.equals(((ActionType) o).name());
     }
 
     @Override
*/ -public class Action { +public class ActionType { private final String name; private final Writeable.Reader responseReader; @@ -36,7 +36,7 @@ public class Action { * @deprecated Pass a {@link Writeable.Reader} with {@link } */ @Deprecated - protected Action(String name) { + protected ActionType(String name) { this(name, null); } @@ -44,7 +44,7 @@ protected Action(String name) { * @param name The name of the action, must be unique across actions. * @param responseReader A reader for the response type */ - public Action(String name, Writeable.Reader responseReader) { + public ActionType(String name, Writeable.Reader responseReader) { this.name = name; this.responseReader = responseReader; } @@ -72,7 +72,7 @@ public TransportRequestOptions transportOptions(Settings settings) { @Override public boolean equals(Object o) { - return o instanceof Action && name.equals(((Action) o).name()); + return o instanceof ActionType && name.equals(((ActionType) o).name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java b/server/src/main/java/org/elasticsearch/action/StreamableResponseActionType.java similarity index 85% rename from server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java rename to server/src/main/java/org/elasticsearch/action/StreamableResponseActionType.java index c7eecfc35d738..b8206bb03f89d 100644 --- a/server/src/main/java/org/elasticsearch/action/StreamableResponseAction.java +++ b/server/src/main/java/org/elasticsearch/action/StreamableResponseActionType.java @@ -23,12 +23,12 @@ /** * An action for with the response type implements {@link org.elasticsearch.common.io.stream.Streamable}. - * @deprecated Use {@link Action} directly and provide a {@link Writeable.Reader} + * @deprecated Use {@link ActionType} directly and provide a {@link Writeable.Reader} */ @Deprecated -public abstract class StreamableResponseAction extends Action { +public abstract class StreamableResponseActionType extends ActionType { - protected StreamableResponseAction(String name) { + protected StreamableResponseActionType(String name) { super(name); } diff --git a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index a4c3e17e80208..4319a745ba816 100644 --- a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -30,10 +30,10 @@ public class TransportActionNodeProxy { private final TransportService transportService; - private final Action action; + private final ActionType action; private final TransportRequestOptions transportOptions; - public TransportActionNodeProxy(Settings settings, Action action, TransportService transportService) { + public TransportActionNodeProxy(Settings settings, ActionType action, TransportService transportService) { this.action = action; this.transportService = transportService; this.transportOptions = action.transportOptions(settings); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java index b4b348ae97ee0..acaaed9eaa985 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java +++ 
@@ -19,12 +19,12 @@
 package org.elasticsearch.action.admin.cluster.allocation;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 /**
- * Action for explaining shard allocation for a shard in the cluster
+ * ActionType for explaining shard allocation for a shard in the cluster
  */
-public class ClusterAllocationExplainAction extends StreamableResponseAction<ClusterAllocationExplainResponse> {
+public class ClusterAllocationExplainAction extends StreamableResponseActionType<ClusterAllocationExplainResponse> {
     public static final ClusterAllocationExplainAction INSTANCE = new ClusterAllocationExplainAction();
     public static final String NAME = "cluster:monitor/allocation/explain";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java
index 20f1e3c50443e..a2f0c721b5da4 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java
@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.action.admin.cluster.configuration;
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
-public class AddVotingConfigExclusionsAction extends Action<AddVotingConfigExclusionsResponse> {
+public class AddVotingConfigExclusionsAction extends ActionType<AddVotingConfigExclusionsResponse> {
     public static final AddVotingConfigExclusionsAction INSTANCE = new AddVotingConfigExclusionsAction();
     public static final String NAME = "cluster:admin/voting_config/add_exclusions";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java
index 6cafcb7653f91..6091800693f49 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java
@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.action.admin.cluster.configuration;
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
-public class ClearVotingConfigExclusionsAction extends Action<ClearVotingConfigExclusionsResponse> {
+public class ClearVotingConfigExclusionsAction extends ActionType<ClearVotingConfigExclusionsResponse> {
     public static final ClearVotingConfigExclusionsAction INSTANCE = new ClearVotingConfigExclusionsAction();
     public static final String NAME = "cluster:admin/voting_config/clear_exclusions";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java
index ceb2a145fb64a..e8f5ecfaf5b66 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.health;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
-public class ClusterHealthAction extends StreamableResponseAction<ClusterHealthResponse> {
+public class ClusterHealthAction extends StreamableResponseActionType<ClusterHealthResponse> {
     public static final ClusterHealthAction INSTANCE = new ClusterHealthAction();
     public static final String NAME = "cluster:monitor/health";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java
index 317fa984163e0..4833625d29522 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.node.hotthreads;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
-public class NodesHotThreadsAction extends StreamableResponseAction<NodesHotThreadsResponse> {
+public class NodesHotThreadsAction extends StreamableResponseActionType<NodesHotThreadsResponse> {
     public static final NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction();
     public static final String NAME = "cluster:monitor/nodes/hot_threads";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java
index b860f07c8ff99..e94390d8f92d8 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.node.info;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
-public class NodesInfoAction extends StreamableResponseAction<NodesInfoResponse> {
+public class NodesInfoAction extends StreamableResponseActionType<NodesInfoResponse> {
     public static final NodesInfoAction INSTANCE = new NodesInfoAction();
     public static final String NAME = "cluster:monitor/nodes/info";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java
index 0a0c8a74fe9d1..e22595c187092 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java
@@ -19,10 +19,10 @@
 package org.elasticsearch.action.admin.cluster.node.reload;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 public class NodesReloadSecureSettingsAction
-        extends StreamableResponseAction<NodesReloadSecureSettingsResponse> {
+        extends StreamableResponseActionType<NodesReloadSecureSettingsResponse> {
     public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction();
     public static final String NAME = "cluster:admin/nodes/reload_secure_settings";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java
index 1febe1b4872f3..dbe7deed74a73 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.node.stats;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
-public class NodesStatsAction extends StreamableResponseAction<NodesStatsResponse> {
+public class NodesStatsAction extends StreamableResponseActionType<NodesStatsResponse> {
     public static final NodesStatsAction INSTANCE = new NodesStatsAction();
     public static final String NAME = "cluster:monitor/nodes/stats";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java
index 39532d18519c3..a0fa139dc7364 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java
@@ -19,13 +19,13 @@
 package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
-import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.common.io.stream.Writeable;
 /**
- * Action for cancelling running tasks
+ * ActionType for cancelling running tasks
  */
-public class CancelTasksAction extends Action<CancelTasksResponse> {
+public class CancelTasksAction extends ActionType<CancelTasksResponse> {
     public static final CancelTasksAction INSTANCE = new CancelTasksAction();
     public static final String NAME = "cluster:admin/tasks/cancel";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java
index cdb5bbc39068a..978e07555b517 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java
@@ -19,12 +19,12 @@
 package org.elasticsearch.action.admin.cluster.node.tasks.get;
-import org.elasticsearch.action.StreamableResponseAction;
+import org.elasticsearch.action.StreamableResponseActionType;
 /**
- * Action for retrieving a list of currently running tasks
+ * ActionType for retrieving the status of a single task
  */
-public class GetTaskAction extends StreamableResponseAction<GetTaskResponse> {
+public class GetTaskAction extends StreamableResponseActionType<GetTaskResponse> {
     public static final String TASKS_ORIGIN = "tasks";
     public static final GetTaskAction INSTANCE = new GetTaskAction();
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
index d1d72da544560..2b0ac0233be29 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
@@ -56,7 +56,7 @@ import static org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction.waitForCompletionTimeout;
 /**
- * Action to get a single task. If the task isn't running then it'll try to request the status from request index.
+ * ActionType to get a single task. If the task isn't running then it'll try to request the status from the results index.
  *
  * The general flow is:
  *